diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index c465f8de0fb..c5fcbf50fca 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,11 +1,8 @@
 blank_issues_enabled: true
 contact_links:
-  - name: Salt Community Slack
-    url: https://saltstackcommunity.slack.com/
+  - name: Salt Community Discord
+    url: https://discord.com/invite/J7b7EscrAs
     about: Please ask and answer questions here.
   - name: Salt-Users Forum
     url: https://groups.google.com/forum/#!forum/salt-users
     about: Please ask and answer questions here.
-  - name: Salt on LiberaChat
-    url: https://web.libera.chat/#salt
-    about: Please ask and answer questions here.
diff --git a/.github/ISSUE_TEMPLATE/tech-debt.md b/.github/ISSUE_TEMPLATE/tech-debt.md
index a13303e3ee1..0fe65bff3c9 100644
--- a/.github/ISSUE_TEMPLATE/tech-debt.md
+++ b/.github/ISSUE_TEMPLATE/tech-debt.md
@@ -8,7 +8,7 @@ assignees: ''
 ---
 
 ### Description of the tech debt to be addressed, include links and screenshots
-<!-- Note: Please direct questions to the salt-users google group, IRC or Community Slack. -->
+<!-- Note: Please direct questions to the salt-users google group, GitHub Discussions or Community Discord. -->
 
 ### Versions Report
 (Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index f06d37e9e14..d707b6c8848 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -11,7 +11,9 @@ Remove this section if not relevant
 
 ### Merge requirements satisfied?
 **[NOTICE] Bug fixes or features added to Salt require tests.**
-<!-- Please review the [test documentation](https://docs.saltproject.io/en/master/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite. -->
+<!-- Please review the test documentation for details on how to implement tests
+into Salt's test suite:
+https://docs.saltproject.io/en/master/topics/tutorials/writing_tests.html -->
 - [ ] Docs
 - [ ] Changelog - https://docs.saltproject.io/en/master/topics/development/changelog.html
 - [ ] Tests written/updated
@@ -19,7 +21,13 @@ Remove this section if not relevant
 ### Commits signed with GPG?
 Yes/No
 
-Please review [Salt's Contributing Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) for best practices, including the
-[PR Guidelines](https://docs.saltproject.io/en/master/topics/development/pull_requests.html).
+<!-- Please review Salt's Contributing Guide for best practices and guidance in
+choosing the right branch:
+https://docs.saltproject.io/en/master/topics/development/contributing.html -->
 
-See GitHub's [page on GPG signing](https://help.github.com/articles/signing-commits-using-gpg/) for more information about signing commits with GPG.
+<!-- Additional guidance for pull requests can be found here:
+https://docs.saltproject.io/en/master/topics/development/pull_requests.html -->
+
+<!-- See GitHub's page on GPG signing for more information about signing commits
+with GPG:
+https://help.github.com/articles/signing-commits-using-gpg/ -->
diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml
index f37fdbea969..b651ab58f18 100644
--- a/.github/actionlint.yaml
+++ b/.github/actionlint.yaml
@@ -1,14 +1,5 @@
 self-hosted-runner:
   # Labels of self-hosted runner in array of string
   labels:
-    - bastion
-    - x86_64
-    - arm64
-    - aarch64
-    - amd64
-    - repo-nightly
-    - repo-staging
-    - repo-release
-    - medium
-    - large
-    - macos-13-xlarge
+    - linux-x86_64
+    - linux-arm64
diff --git a/.github/actions/cache/action.yml b/.github/actions/cache/action.yml
index 020b9d1e6b8..b8bea242cf0 100644
--- a/.github/actions/cache/action.yml
+++ b/.github/actions/cache/action.yml
@@ -26,10 +26,6 @@ inputs:
     description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
     default: 'false'
     required: false
-  save-always:
-    description: 'Run the post step to save the cache even if another step before fails'
-    default: 'false'
-    required: false
 
 outputs:
   cache-hit:
@@ -49,7 +45,6 @@ runs:
         echo "GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE=${{ inputs.enableCrossOsArchive }}" | tee -a "${GITHUB_ENV}"
         echo "GHA_CACHE_FAIL_ON_CACHE_MISS=${{ inputs.fail-on-cache-miss }}" | tee -a "${GITHUB_ENV}"
         echo "GHA_CACHE_LOOKUP_ONLY=${{ inputs.lookup-only }}" | tee -a "${GITHUB_ENV}"
-        echo "GHA_CACHE_SAVE_ALWAYS=${{ inputs.save-always }}" | tee -a "${GITHUB_ENV}"
         echo "GHA_CACHE_RESTORE_KEYS=${{ inputs.restore-keys }}" | tee -a "${GITHUB_ENV}"
         echo "GHA_CACHE_UPLOAD_CHUNK_SIZE=${{ inputs.upload-chunk-size }}" | tee -a "${GITHUB_ENV}"
 
@@ -63,7 +58,6 @@ runs:
         enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
         fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
         lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
-        save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }}
         restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
         upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
 
@@ -97,7 +91,6 @@ runs:
         enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
         fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
         lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
-        save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }}
         restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
         upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
 
diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml
index e640ffe86f7..21390a12c4a 100644
--- a/.github/actions/setup-python-tools-scripts/action.yml
+++ b/.github/actions/setup-python-tools-scripts/action.yml
@@ -54,10 +54,13 @@ runs:
       working-directory: ${{ inputs.cwd }}
       run: |
         PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }}
+        ${PYTHON_EXE} -m ensurepip --upgrade
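+        # Detect whether this pip supports --break-system-packages (PEP 668,
+        # externally managed environments) by grepping its --help output.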
         (${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1
         if [ $exitcode -eq 0 ]; then
+          ${PYTHON_EXE} -m pip install --break-system-packages --upgrade setuptools
           ${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
         else
+          ${PYTHON_EXE} -m pip install --upgrade setuptools
           ${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
         fi
 
diff --git a/.github/actions/ssh-tunnel/README.md b/.github/actions/ssh-tunnel/README.md
new file mode 100644
index 00000000000..f6f03e5b2d5
--- /dev/null
+++ b/.github/actions/ssh-tunnel/README.md
@@ -0,0 +1,94 @@
+# SSH Tunnel
+
+The ssh-tunnel action will create a reverse tunnel over WebRTC to port 22 on the runner.
+
+## Usage
+
+In order to use this action you must have an sdp offer from your local host and an ssh key pair.
+Start by creating an sdp offer on your local machine. Provide these values to the ssh-tunnel
+action and wait for the action to output the sdp reply. Provide the reply to the local
+rtcforward.py process by pasting it to stdin. If all goes well, the local port on your machine
+will be forwarded to the ssh port on the runner.
+
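+As a rough sketch, invoking the action from a workflow job might look like the following.
+The input names shown (`offer`, `public_key`) are illustrative assumptions rather than the
+action's confirmed interface; check the action.yml in this directory for the real inputs.
+
+``` yaml
+# Hypothetical invocation sketch; the input names are assumptions.
+- name: Open SSH tunnel to the runner
+  uses: ./.github/actions/ssh-tunnel
+  with:
+    offer: ${{ env.SDP_OFFER }}         # base64 sdp offer generated locally
+    public_key: ${{ secrets.SSH_KEY }}  # public key authorized for ssh on the runner
+```
+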
+### Getting an sdp offer
+
+To get an sdp offer, start rtcforward.py on your local machine with the offer command.
+You can also specify which port on the local machine will be used for the tunnel.
+
+``` bash
+$ python3 .github/actions/ssh-tunnel/rtcforward.py offer --port 5222
+```
+
+rtcforward.py will create an offer and display it in your terminal. (This example offer has been truncated.)
+After showing the offer, the `rtcforward.py` process will wait for a reply.
+```
+-- offer --
+eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg4NjUzIDM5NDczODg2NTMgSU4gSVA0IDAuMC4wLjBcclxu
+cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
+bm09YXBwbGljYXRpb24gMzUyNjkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC4wLjIw
+IHVkcCAxNjk0NDk4ODE1IDE4NC4xNzkuMjEwLjE1MiAzNTI2OSB0eXAgc3JmbHggcmFkZHIgMTkyLjE2
+OC4wLjIwMSBycG9ydCAzNTI2OVxyXG5hPWNhbmRpZGF0ZTozZWFjMzJiZTZkY2RkMTAwZDcwMTFiNWY0
+NTo4Qzo2MDoxMTpFQTo3NzpDMTo5RTo1QTo3QzpDQzowRDowODpFQzo2NDowQToxM1xyXG5hPWZpbmdl
+cnByaW50OnNoYS01MTIgNjY6MzI6RUQ6MDA6N0I6QjY6NTQ6NzA6MzE6OTA6M0I6Mjg6Q0I6QTk6REU6
+MzQ6QjI6NDY6NzE6NUI6MjM6ODA6Nzg6Njg6RDA6QTA6QTg6MjU6QkY6MDQ6ODY6NUY6OTA6QUY6MUQ6
+QjA6QzY6ODA6QUY6OTc6QTI6MkM6NDI6QUU6MkI6Q0Q6Mjk6RUQ6MkI6ODc6NTU6ODg6NDY6QTM6ODk6
+OEY6ODk6OTE6QTE6QTI6NDM6NTc6M0E6MjZcclxuYT1zZXR1cDphY3RwYXNzXHJcbiIsICJ0eXBlIjog
+Im9mZmVyIn0=
+-- end offer --
+-- Please enter a message from remote party --
+```
+
+### Getting an sdp answer
+
+Provide the offer to the ssh-tunnel action. When the action runs, an answer to the offer will be generated.
+In the action output you will see confirmation that the offer was received, followed by the reply.
+
+```
+-- Please enter a message from remote party --
+-- Message received --
+-- reply --
+eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg3NDcxIDM5NDczODc0NzEgSU4gSVA0IDAuMC4wLjBcclxu
+cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
+bm09YXBwbGljYXRpb24gNTcwMzkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC42NC4x
+MFxyXG5hPW1pZDowXHJcbmE9c2N0cG1hcDo1MDAwIHdlYnJ0Yy1kYXRhY2hhbm5lbCA2NTUzNVxyXG5h
+MTc6MEI6RTA6OTA6QUM6RjU6RTk6RUI6Q0E6RUE6NTY6REI6NTA6QTk6REY6NTU6MzY6MkM6REI6OUE6
+MDc6Mzc6QTM6NDc6NjlcclxuYT1maW5nZXJwcmludDpzaGEtNTEyIDMyOjRDOjk0OkRDOjNFOkU5OkU3
+OjNCOjc5OjI4OjZDOjc5OkFEOkVDOjIzOkJDOjRBOjRBOjE5OjlCOjg5OkE3OkE2OjZBOjAwOjJFOkM5
+OkE0OjlEOjAwOjM0OjFFOjRDOkVGOjcwOkY5OkNBOjg0OjlEOjcxOjI5OkVCOkIxOkREOkFEOjg5OjUx
+OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0
+dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9
+-- end reply --
+```
+
+### Finalizing the tunnel
+
+Paste the sdp reply from the running action into the running `rtcforward.py` process that created the offer.
+After the reply is received you will see `-- Message received --` and the tunnel will be created.
+
+```
+-- offer --
+eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg4NjUzIDM5NDczODg2NTMgSU4gSVA0IDAuMC4wLjBcclxu
+cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
+bm09YXBwbGljYXRpb24gMzUyNjkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC4wLjIw
+IHVkcCAxNjk0NDk4ODE1IDE4NC4xNzkuMjEwLjE1MiAzNTI2OSB0eXAgc3JmbHggcmFkZHIgMTkyLjE2
+OC4wLjIwMSBycG9ydCAzNTI2OVxyXG5hPWNhbmRpZGF0ZTozZWFjMzJiZTZkY2RkMTAwZDcwMTFiNWY0
+NTo4Qzo2MDoxMTpFQTo3NzpDMTo5RTo1QTo3QzpDQzowRDowODpFQzo2NDowQToxM1xyXG5hPWZpbmdl
+cnByaW50OnNoYS01MTIgNjY6MzI6RUQ6MDA6N0I6QjY6NTQ6NzA6MzE6OTA6M0I6Mjg6Q0I6QTk6REU6
+MzQ6QjI6NDY6NzE6NUI6MjM6ODA6Nzg6Njg6RDA6QTA6QTg6MjU6QkY6MDQ6ODY6NUY6OTA6QUY6MUQ6
+QjA6QzY6ODA6QUY6OTc6QTI6MkM6NDI6QUU6MkI6Q0Q6Mjk6RUQ6MkI6ODc6NTU6ODg6NDY6QTM6ODk6
+OEY6ODk6OTE6QTE6QTI6NDM6NTc6M0E6MjZcclxuYT1zZXR1cDphY3RwYXNzXHJcbiIsICJ0eXBlIjog
+Im9mZmVyIn0=
+-- end offer --
+-- Please enter a message from remote party --
+eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg3NDcxIDM5NDczODc0NzEgSU4gSVA0IDAuMC4wLjBcclxu
+cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
+bm09YXBwbGljYXRpb24gNTcwMzkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC42NC4x
+MFxyXG5hPW1pZDowXHJcbmE9c2N0cG1hcDo1MDAwIHdlYnJ0Yy1kYXRhY2hhbm5lbCA2NTUzNVxyXG5h
+MTc6MEI6RTA6OTA6QUM6RjU6RTk6RUI6Q0E6RUE6NTY6REI6NTA6QTk6REY6NTU6MzY6MkM6REI6OUE6
+MDc6Mzc6QTM6NDc6NjlcclxuYT1maW5nZXJwcmludDpzaGEtNTEyIDMyOjRDOjk0OkRDOjNFOkU5OkU3
+OjNCOjc5OjI4OjZDOjc5OkFEOkVDOjIzOkJDOjRBOjRBOjE5OjlCOjg5OkE3OkE2OjZBOjAwOjJFOkM5
+OkE0OjlEOjAwOjM0OjFFOjRDOkVGOjcwOkY5OkNBOjg0OjlEOjcxOjI5OkVCOkIxOkREOkFEOjg5OjUx
+OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0
+dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9
+-- Message received --
+```
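+
+Once the tunnel is up you can ssh to the runner through the forwarded local port. A minimal
+sketch, assuming the offer was created with `--port 5222` as above and that the runner's
+ssh user is `runner` (both assumptions; adjust them for your setup):
+
+``` bash
+# 5222 is the local port passed to `rtcforward.py offer --port 5222` above.
+$ ssh -p 5222 runner@localhost
+```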
diff --git a/.github/config.yml b/.github/config.yml
index 6bf3cadf069..1d916579c6c 100644
--- a/.github/config.yml
+++ b/.github/config.yml
@@ -11,18 +11,16 @@ newIssueWelcomeComment: >
   Also, check out some of our community
   resources including:
 
-    - [Community Wiki](https://github.com/saltstack/community/wiki)
     - [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
-    - [Join our Community Slack](https://via.vmw.com/salt-slack)
-    - [IRC on LiberaChat](https://web.libera.chat/#salt)
+    - [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
     - [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
-    - [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss)
+    - [Community Wiki](https://github.com/saltstack/community/wiki)
 
   There are lots of ways to get involved in our community. Every month, there are around a dozen
   opportunities to meet with other contributors and the Salt Core team and collaborate in real
   time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
 
-  If you have additional questions, email us at saltproject@vmware.com. We’re glad
+  If you have additional questions, email us at saltproject.pdl@broadcom.com. We’re glad
   you’ve joined our community and look forward to doing awesome things with
   you!
 
@@ -37,18 +35,16 @@ newPRWelcomeComment: >
   Also, check out some of our community
   resources including:
 
-    - [Community Wiki](https://github.com/saltstack/community/wiki)
     - [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
-    - [Join our Community Slack](https://via.vmw.com/salt-slack)
-    - [IRC on LiberaChat](https://web.libera.chat/#salt)
+    - [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
     - [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
-    - [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss)
+    - [Community Wiki](https://github.com/saltstack/community/wiki)
 
   There are lots of ways to get involved in our community. Every month, there are around a dozen
   opportunities to meet with other contributors and the Salt Core team and collaborate in real
   time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
 
-  If you have additional questions, email us at saltproject@vmware.com. We’re glad
+  If you have additional questions, email us at saltproject.pdl@broadcom.com. We’re glad
   you’ve joined our community and look forward to doing awesome things with
   you!
 
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 0f61ba09040..31345a58b6e 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -20,12 +20,14 @@ jobs:
       github.event.pull_request.merged == true
       && (
         contains(github.event.pull_request.labels.*.name, 'backport:master') ||
+        contains(github.event.pull_request.labels.*.name, 'backport:3007.x') ||
         contains(github.event.pull_request.labels.*.name, 'backport:3006.x') ||
         contains(github.event.pull_request.labels.*.name, 'backport:3005.x')
       )
       && (
         (github.event.action == 'labeled' && (
           contains(github.event.pull_request.labels.*.name, 'backport:master') ||
+          contains(github.event.pull_request.labels.*.name, 'backport:3007.x') ||
           contains(github.event.pull_request.labels.*.name, 'backport:3006.x') ||
           contains(github.event.pull_request.labels.*.name, 'backport:3005.x')
         ))
diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml
index a7d2d3da5fa..5ccabd6d822 100644
--- a/.github/workflows/build-deps-ci-action.yml
+++ b/.github/workflows/build-deps-ci-action.yml
@@ -34,6 +34,14 @@ on:
         type: string
         description: The onedir package name to use
         default: salt
+      matrix:
+        required: true
+        type: string
+        description: JSON job matrix config
+      linux_arm_runner:
+        required: true
+        type: string
+        description: The runner label to use for Linux arm64 jobs
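+      # Illustrative shape of the matrix input, inferred from the fromJSON(inputs.matrix)
+      # usage in the jobs below (an assumption, not a documented schema):
+      #   {"linux": [{"arch": "x86_64"}, {"arch": "arm64"}], "macos": [...], "windows": [...]}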
 
 
 env:
@@ -48,52 +56,22 @@ env:
 
 jobs:
 
-  generate-matrix:
-    name: Generate Matrix
-    runs-on: ubuntu-latest
-    outputs:
-      matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
-    env:
-      PIP_INDEX_URL: https://pypi.org/simple
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
-
-      - name: Generate Test Matrix
-        id: generate-matrix
-        run: |
-          tools ci deps-matrix
-
-
   linux-dependencies:
     name: Linux
-    needs:
-      - generate-matrix
+    if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
     runs-on:
-      - self-hosted
-      - linux
-      - bastion
+      - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
     env:
-      USE_S3_CACHE: 'true'
+      USE_S3_CACHE: 'false'
     timeout-minutes: 90
     strategy:
       fail-fast: false
       matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['linux'] }}
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
     steps:
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
 
       - name: "Throttle Builds"
         shell: bash
@@ -103,6 +81,10 @@ jobs:
       - name: Checkout Source Code
         uses: actions/checkout@v4
 
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
       - name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }}
         id: nox-dependencies-cache
         uses: ./.github/actions/cache
@@ -136,53 +118,34 @@ jobs:
         with:
           cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
 
-      - name: Get Salt Project GitHub Actions Bot Environment
+      - name: Install System Dependencies
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
+          echo true
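+          # Placeholder no-op; nothing is installed here (assumption: the hosted
+          # runner image already provides the required system packages).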
 
-      - name: Start VM
-        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        id: spin-up-vm
-        run: |
-          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
-
-      - name: List Free Space
+      - name: Install Nox
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
-
-      - name: Upload Checkout To VM
-        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          tools --timestamps vm rsync ${{ matrix.distro-slug }}
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
 
       - name: Install Dependencies
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        env:
+          PRINT_TEST_SELECTION: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RELENV_BUILDENV: "1"
         run: |
-          tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }}
+          nox --install-only -e ${{ inputs.nox-session }}
 
       - name: Cleanup .nox Directory
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }}
+          nox --force-color -e "pre-archive-cleanup(pkg=False)"
 
       - name: Compress .nox Directory
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }}
-
-      - name: Download Compressed .nox Directory
-        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          tools --timestamps vm download-dependencies ${{ matrix.distro-slug }}
-
-      - name: Destroy VM
-        if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }}
+          nox --force-color -e compress-dependencies -- linux ${{ matrix.arch }}
 
       - name: Upload Nox Requirements Tarball
         uses: actions/upload-artifact@v4
@@ -192,14 +155,13 @@ jobs:
 
   macos-dependencies:
     name: MacOS
-    needs:
-      - generate-matrix
-    runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
+    runs-on: ${{ matrix.arch == 'x86_64' && 'macos-13' || 'macos-14' }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
     timeout-minutes: 90
     strategy:
       fail-fast: false
       matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }}
+        include: ${{ fromJSON(inputs.matrix)['macos'] }}
     env:
       PIP_INDEX_URL: https://pypi.org/simple
     steps:
@@ -278,21 +240,19 @@ jobs:
           name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
           path: nox.macos.${{ matrix.arch }}.tar.*
 
+
   windows-dependencies:
-    needs:
-      - generate-matrix
     name: Windows
-    runs-on:
-      - self-hosted
-      - linux
-      - bastion
+    runs-on: windows-latest
+    if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
     env:
-      USE_S3_CACHE: 'true'
+      USE_S3_CACHE: 'false'
+      GITHUB_WORKSPACE: 'C:\Windows\Temp\testing'
     timeout-minutes: 90
     strategy:
       fail-fast: false
       matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['windows'] }}
+        include: ${{ fromJSON(inputs.matrix)['windows'] }}
     steps:
 
       - name: "Throttle Builds"
@@ -300,6 +260,10 @@ jobs:
         run: |
           t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
 
+      - name: "Show environment"
+        run: |
+          env
+
       - name: Checkout Source Code
         uses: actions/checkout@v4
 
@@ -325,10 +289,11 @@ jobs:
           cd artifacts
           tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
 
-      - name: PyPi Proxy
+      - name: Set up Python ${{ inputs.python-version }}
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
 
       - name: Setup Python Tools Scripts
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
@@ -336,53 +301,33 @@ jobs:
         with:
           cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
 
-      - name: Get Salt Project GitHub Actions Bot Environment
+      - name: Install System Dependencies
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
+          echo true
 
-      - name: Start VM
-        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        id: spin-up-vm
-        run: |
-          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
-
-      - name: List Free Space
+      - name: Install Nox
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
-
-      - name: Upload Checkout To VM
-        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          tools --timestamps vm rsync ${{ matrix.distro-slug }}
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
 
       - name: Install Dependencies
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        env:
+          PRINT_TEST_SELECTION: "0"
+          PRINT_SYSTEM_INFO: "0"
         run: |
-          tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }}
+          nox --install-only -e ${{ inputs.nox-session }}
 
       - name: Cleanup .nox Directory
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }}
+          nox --force-color -e "pre-archive-cleanup(pkg=False)"
 
       - name: Compress .nox Directory
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }}
-
-      - name: Download Compressed .nox Directory
-        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          tools --timestamps vm download-dependencies ${{ matrix.distro-slug }}
-
-      - name: Destroy VM
-        if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        run: |
-          tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }}
+          nox --force-color -e compress-dependencies -- windows ${{ matrix.arch }}
 
       - name: Upload Nox Requirements Tarball
         uses: actions/upload-artifact@v4
diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml
index c9e024e8cd6..41d53f58c38 100644
--- a/.github/workflows/build-deps-onedir.yml
+++ b/.github/workflows/build-deps-onedir.yml
@@ -8,12 +8,6 @@ on:
         type: string
         required: true
         description: The Salt version to set prior to building packages.
-      github-hosted-runners:
-        type: boolean
-        required: true
-      self-hosted-runners:
-        type: boolean
-        required: true
       cache-seed:
         required: true
         type: string
@@ -26,6 +20,14 @@ on:
         required: true
         type: string
         description: The version of python to use with relenv
+      matrix:
+        required: true
+        type: string
+        description: JSON job matrix config
+      linux_arm_runner:
+        required: true
+        type: string
+        description: The runner label to use for Linux arm64 jobs
 
 env:
   RELENV_DATA: "${{ github.workspace }}/.relenv"
@@ -41,20 +43,15 @@ jobs:
 
   build-deps-linux:
     name: Linux
-    if: ${{ inputs.self-hosted-runners }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
+    runs-on:
+      - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
     strategy:
       fail-fast: false
       matrix:
-        arch:
-          - x86_64
-          - arm64
-    runs-on:
-      - self-hosted
-      - linux
-      - medium
-      - ${{ matrix.arch }}
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
     env:
-      USE_S3_CACHE: 'true'
+      USE_S3_CACHE: 'false'
     steps:
       - name: "Fail"
         run: exit 1
@@ -66,6 +63,10 @@ jobs:
 
       - uses: actions/checkout@v4
 
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
       - name: Setup Python Tools Scripts
         uses: ./.github/actions/setup-python-tools-scripts
         with:
@@ -91,19 +92,23 @@ jobs:
 
   build-deps-macos:
     name: macOS
-    if: ${{ inputs.github-hosted-runners }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
     strategy:
       fail-fast: false
       max-parallel: 2
       matrix:
-        arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
+        include: ${{ fromJSON(inputs.matrix)['macos'] }}
     runs-on:
-      - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
+      - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
     env:
       USE_S3_CACHE: 'false'
       PIP_INDEX_URL: https://pypi.org/simple
     steps:
 
+      - name: "Check cores"
+        shell: bash
+        run: sysctl -n hw.ncpu
+
       - name: "Throttle Builds"
         shell: bash
         run: |
@@ -141,14 +146,12 @@ jobs:
 
   build-deps-windows:
     name: Windows
-    if: ${{ inputs.github-hosted-runners }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
     strategy:
       fail-fast: false
       max-parallel: 2
       matrix:
-        arch:
-          - x86
-          - amd64
+        include: ${{ fromJSON(inputs.matrix)['windows'] }}
     runs-on: windows-latest
     env:
       USE_S3_CACHE: 'false'
diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index adeeb2fff67..3372769516a 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -24,19 +24,21 @@ jobs:
   build:
     name: Build
     runs-on:
-      - ubuntu-latest
+      - ubuntu-22.04
     strategy:
       fail-fast: false
       matrix:
         docs-output:
-          - linkcheck
-          - spellcheck
+         # XXX re-enable linkcheck and spellcheck, then fix the errors
+         # - linkcheck
+         # - spellcheck
           - html
-          - epub
-          # - pdf
 
     steps:
       - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
 
       - name: Download Release Patch
         if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
diff --git a/.github/workflows/build-packages.yml b/.github/workflows/build-packages.yml
index 47a24bf2975..b78cc6e68a8 100644
--- a/.github/workflows/build-packages.yml
+++ b/.github/workflows/build-packages.yml
@@ -36,6 +36,14 @@ on:
         required: true
         type: string
         description: Seed used to invalidate caches
+      matrix:
+        required: true
+        type: string
+        description: JSON job matrix config
+      linux_arm_runner:
+        required: true
+        type: string
+        description: The runner label to use for Linux arm64 jobs
 
 env:
   COLUMNS: 190
@@ -46,19 +54,199 @@ env:
 
 jobs:
 
+  build-deb-packages:
+    name: DEB
+    if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
+    runs-on:
+      - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
+
+    container:
+      image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
+
+    steps:
+      # Checkout here so we can easily use custom actions
+      - uses: actions/checkout@v4
+
+      # We need a more recent rustc
+      - name: Install a more recent `rustc`
+        if: ${{ inputs.source == 'src' }}
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+
+      - name: Set rust environment variables
+        if: ${{ inputs.source == 'src' }}
+        run: |
+          CARGO_HOME=${CARGO_HOME:-${HOME}/.cargo}
+          export CARGO_HOME
+          echo "CARGO_HOME=${CARGO_HOME}" | tee -a "${GITHUB_ENV}"
+          echo "${CARGO_HOME}/bin" | tee -a "${GITHUB_PATH}"
+
+      # Checkout here for the build process
+      - name: Checkout in build directory
+        uses: actions/checkout@v4
+        with:
+          path:
+            pkgs/checkout/
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
+          path: pkgs/checkout/artifacts/
+
+      - name: Download Release Patch
+        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
+        uses: actions/download-artifact@v4
+        with:
+          name: salt-${{ inputs.salt-version }}.patch
+          path: pkgs/checkout/
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+        with:
+          cwd: pkgs/checkout/
+          cache-prefix: ${{ inputs.cache-prefix }}
+
+      - name: Setup Salt Version
+        id: setup-salt-version
+        uses: ./.github/actions/setup-salt-version
+        with:
+          salt-version: "${{ inputs.salt-version }}"
+          cwd: pkgs/checkout/
+
+      - name: Configure Git
+        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
+        working-directory: pkgs/checkout/
+        run: |
+          tools pkg configure-git
+
+      - name: Apply release patch
+        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
+        working-directory: pkgs/checkout/
+        run: |
+          tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
+
+      - name: Build Deb
+        working-directory: pkgs/checkout/
+        run: |
+          tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
+              inputs.source == 'onedir' &&
+              format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
+              ||
+              format('--arch={0}', matrix.arch)
+          }}
+
+      - name: Cleanup
+        run: |
+          rm -rf pkgs/checkout/
+
+      - name: Set Artifact Name
+        id: set-artifact-name
+        run: |
+          if [ "${{ inputs.source }}" != "src" ]; then
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
+          else
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
+          fi
+
+      - name: Upload DEBs
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ steps.set-artifact-name.outputs.artifact-name }}
+          path: ${{ github.workspace }}/pkgs/*
+          retention-days: 7
+          if-no-files-found: error
+
+  build-rpm-packages:
+    name: RPM
+    if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
+    runs-on:
+      - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
+
+    container:
+      image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Download Release Patch
+        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
+        uses: actions/download-artifact@v4
+        with:
+          name: salt-${{ inputs.salt-version }}.patch
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+        with:
+          cache-prefix: ${{ inputs.cache-prefix }}
+
+      - name: Setup Salt Version
+        id: setup-salt-version
+        uses: ./.github/actions/setup-salt-version
+        with:
+          salt-version: "${{ inputs.salt-version }}"
+
+      - name: Configure Git
+        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
+        run: |
+          tools pkg configure-git
+
+      - name: Apply release patch
+        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
+        run: |
+          tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
+
+      - name: Build RPM
+        run: |
+          tools pkg build rpm  --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
+              inputs.source == 'onedir' &&
+              format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
+              ||
+              format('--arch={0}', matrix.arch)
+          }}
+
+      - name: Set Artifact Name
+        id: set-artifact-name
+        run: |
+          if [ "${{ inputs.source }}" != "src" ]; then
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
+          else
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
+          fi
+
+      - name: Upload RPMs
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ steps.set-artifact-name.outputs.artifact-name }}
+          path: ~/rpmbuild/RPMS/${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}/*.rpm
+          retention-days: 7
+          if-no-files-found: error
+
   build-macos-pkgs:
     name: macOS
+    if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
     environment: ${{ inputs.environment }}
     strategy:
       fail-fast: false
       matrix:
-        arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
-        source:
-          - ${{ inputs.source }}
+        include: ${{ fromJSON(inputs.matrix)['macos'] }}
     env:
       PIP_INDEX_URL: https://pypi.org/simple
     runs-on:
-      - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
+      - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
 
     steps:
       - name: Check Package Signing Enabled
@@ -162,212 +350,15 @@ jobs:
           retention-days: 7
           if-no-files-found: error
 
-  build-deb-packages:
-    name: DEB
-    runs-on:
-      - self-hosted
-      - linux
-      - medium
-      - ${{ matrix.arch }}
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - x86_64
-          - arm64
-        source:
-          - ${{ inputs.source }}
-
-    container:
-      image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
-
-    steps:
-      # Checkout here so we can easily use custom actions
-      - uses: actions/checkout@v4
-
-      # We need a more recent rustc
-      - name: Install a more recent `rustc`
-        if: ${{ inputs.source == 'src' }}
-        uses: actions-rust-lang/setup-rust-toolchain@v1
-
-      - name: Set rust environment variables
-        if: ${{ inputs.source == 'src' }}
-        run: |
-          CARGO_HOME=${CARGO_HOME:-${HOME}/.cargo}
-          export CARGO_HOME
-          echo "CARGO_HOME=${CARGO_HOME}" | tee -a "${GITHUB_ENV}"
-          echo "${CARGO_HOME}/bin" | tee -a "${GITHUB_PATH}"
-
-      # Checkout here for the build process
-      - name: Checkout in build directory
-        uses: actions/checkout@v4
-        with:
-          path:
-            pkgs/checkout/
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
-          path: pkgs/checkout/artifacts/
-
-      - name: Download Release Patch
-        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ inputs.salt-version }}.patch
-          path: pkgs/checkout/
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cwd: pkgs/checkout/
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Setup Salt Version
-        id: setup-salt-version
-        uses: ./.github/actions/setup-salt-version
-        with:
-          salt-version: "${{ inputs.salt-version }}"
-          cwd: pkgs/checkout/
-
-      - name: Configure Git
-        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
-        working-directory: pkgs/checkout/
-        run: |
-          tools pkg configure-git
-
-      - name: Apply release patch
-        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
-        working-directory: pkgs/checkout/
-        run: |
-          tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
-
-      - name: Build Deb
-        working-directory: pkgs/checkout/
-        run: |
-          tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
-              inputs.source == 'onedir' &&
-              format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
-              ||
-              format('--arch={0}', matrix.arch)
-          }}
-
-      - name: Cleanup
-        run: |
-          rm -rf pkgs/checkout/
-
-      - name: Set Artifact Name
-        id: set-artifact-name
-        run: |
-          if [ "${{ inputs.source }}" != "src" ]; then
-            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
-          else
-            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
-          fi
-
-      - name: Upload DEBs
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ steps.set-artifact-name.outputs.artifact-name }}
-          path: ${{ github.workspace }}/pkgs/*
-          retention-days: 7
-          if-no-files-found: error
-
-  build-rpm-packages:
-    name: RPM
-    runs-on:
-      - self-hosted
-      - linux
-      - medium
-      - ${{ matrix.arch }}
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - x86_64
-          - arm64
-        source:
-          - ${{ inputs.source }}
-
-    container:
-      image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Download Release Patch
-        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ inputs.salt-version }}.patch
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Setup Salt Version
-        id: setup-salt-version
-        uses: ./.github/actions/setup-salt-version
-        with:
-          salt-version: "${{ inputs.salt-version }}"
-
-      - name: Configure Git
-        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
-        run: |
-          tools pkg configure-git
-
-      - name: Apply release patch
-        if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
-        run: |
-          tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
-
-      - name: Build RPM
-        run: |
-          tools pkg build rpm  --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
-              inputs.source == 'onedir' &&
-              format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
-              ||
-              format('--arch={0}', matrix.arch)
-          }}
-
-      - name: Set Artifact Name
-        id: set-artifact-name
-        run: |
-          if [ "${{ inputs.source }}" != "src" ]; then
-            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
-          else
-            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
-          fi
-
-      - name: Upload RPMs
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ steps.set-artifact-name.outputs.artifact-name }}
-          path: ~/rpmbuild/RPMS/${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}/*.rpm
-          retention-days: 7
-          if-no-files-found: error
-
   build-windows-pkgs:
     name: Windows
+    if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
     environment: ${{ inputs.environment }}
     strategy:
       fail-fast: false
       max-parallel: 2
       matrix:
-        arch:
-          - x86
-          - amd64
-        source:
-          - ${{ inputs.source }}
-
+        include: ${{ fromJSON(inputs.matrix)['windows'] }}
     runs-on:
       - windows-latest
     env:
diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml
index 5913038bbd2..5c3078b2c96 100644
--- a/.github/workflows/build-salt-onedir.yml
+++ b/.github/workflows/build-salt-onedir.yml
@@ -8,12 +8,6 @@ on:
         type: string
         required: true
         description: The Salt version to set prior to building packages.
-      github-hosted-runners:
-        type: boolean
-        required: true
-      self-hosted-runners:
-        type: boolean
-        required: true
       cache-seed:
         required: true
         type: string
@@ -26,6 +20,14 @@ on:
         required: true
         type: string
         description: The version of python to use with relenv
+      matrix:
+        type: string
+        required: true
+        description: JSON config for the build matrix
+      linux_arm_runner:
+        required: true
+        type: string
+        description: The runner label to use for Linux arm64 jobs
 
 env:
   RELENV_DATA: "${{ github.workspace }}/.relenv"
@@ -39,21 +41,18 @@ env:
 
 jobs:
 
+
   build-salt-linux:
     name: Linux
-    if: ${{ inputs.self-hosted-runners }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
     env:
-      USE_S3_CACHE: 'true'
+      USE_S3_CACHE: 'false'
+    runs-on:
+      - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
     strategy:
       fail-fast: false
       matrix:
-        arch:
-          - x86_64
-          - arm64
-    runs-on:
-      - self-hosted
-      - linux
-      - ${{ matrix.arch }}
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
     steps:
 
       - name: "Throttle Builds"
@@ -63,6 +62,10 @@ jobs:
 
       - uses: actions/checkout@v4
 
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
       - name: Setup Python Tools Scripts
         uses: ./.github/actions/setup-python-tools-scripts
         with:
@@ -95,18 +98,22 @@ jobs:
 
   build-salt-macos:
     name: macOS
-    if: ${{ inputs.github-hosted-runners }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
     strategy:
       fail-fast: false
       max-parallel: 2
       matrix:
-        arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
+        include: ${{ fromJSON(inputs.matrix)['macos'] }}
     runs-on:
-      - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
+      - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
     env:
       PIP_INDEX_URL: https://pypi.org/simple
 
     steps:
+      - name: "Check cores"
+        shell: bash
+        run: sysctl -n hw.ncpu
+
       - name: "Throttle Builds"
         shell: bash
         run: |
@@ -150,14 +157,12 @@ jobs:
 
   build-salt-windows:
     name: Windows
-    if: ${{ inputs.github-hosted-runners }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
     strategy:
       fail-fast: false
       max-parallel: 2
       matrix:
-        arch:
-          - x86
-          - amd64
+        include: ${{ fromJSON(inputs.matrix)['windows'] }}
     runs-on: windows-latest
     env:
       PIP_INDEX_URL: https://pypi.org/simple
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f21a23a2822..db499c2bee5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -16,7 +16,7 @@ on:
 
 env:
   COLUMNS: 190
-  CACHE_SEED: SEED-2  # Bump the number to invalidate all caches
+  CACHE_SEED: SEED-1  # Bump the number to invalidate all caches
   RELENV_DATA: "${{ github.workspace }}/.relenv"
   PIP_DISABLE_PIP_VERSION_CHECK: "1"
   RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@@ -38,14 +38,10 @@ jobs:
 
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    environment: ci
     outputs:
-      jobs: ${{ steps.define-jobs.outputs.jobs }}
-      runners: ${{ steps.runner-types.outputs.runners }}
       changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
-      os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
-      pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
-      testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
@@ -53,6 +49,11 @@ jobs:
       release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
       testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
+    env:
+      LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
+      FULL_TESTRUN_SLUGS: ${{ vars.FULL_TESTRUN_SLUGS }}
+      PR_TESTRUN_SLUGS: ${{ vars.PR_TESTRUN_SLUGS }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -128,6 +129,9 @@ jobs:
                 - pkg/**
                 - *pkg_requirements
                 - *salt_added_modified
+            nsis_tests:
+              - added|modified: &nsis_tests
+                - pkg/windows/nsis/**
             testrun:
               - added|modified:
                 - *pkg_requirements
@@ -162,14 +166,6 @@ jobs:
           salt-version: ""
           validate-version: true
 
-      - name: Get Pull Request Test Labels
-        id: get-pull-labels
-        if: ${{ github.event_name == 'pull_request'}}
-        env:
-            GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          tools ci get-pr-test-labels --repository ${{ github.repository }}
-
       - name: Get Hash For Nox Tarball Cache
         id: nox-archive-hash
         run: |
@@ -194,16 +190,6 @@ jobs:
         run: |
           echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
 
-      - name: Define Runner Types
-        id: runner-types
-        run: |
-          tools ci runner-types ${{ github.event_name }}
-
-      - name: Define Jobs To Run
-        id: define-jobs
-        run: |
-          tools ci define-jobs ${{ github.event_name }} changed-files.json
-
       - name: Get Salt Releases
         id: get-salt-releases
         env:
@@ -218,18 +204,18 @@ jobs:
         run: |
           tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
 
-      - name: Define Testrun
-        id: define-testrun
+      - name: Define workflow config
+        id: workflow-config
         run: |
-          tools ci define-testrun ${{ github.event_name }} changed-files.json
+          tools ci workflow-config ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
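+      # The emitted config is consumed via fromJSON() throughout this workflow. Inferred
+      # keys (an assumption based on usage, not a documented schema): 'jobs', 'testrun'
+      # (with a 'type' such as 'full'), 'build-matrix', and 'linux_arm_runner'.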
 
       - name: Check Contents of generated testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         run: |
           cat testrun-changed-files.txt || true
 
       - name: Upload testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         uses: actions/upload-artifact@v4
         with:
           name: testrun-changed-files.txt
@@ -243,7 +229,6 @@ jobs:
 
   pre-commit:
     name: Pre-Commit
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
     uses: ./.github/workflows/pre-commit-action.yml
     needs:
       - prepare-workflow
@@ -254,17 +239,25 @@ jobs:
 
   lint:
     name: Lint
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['lint'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
     uses: ./.github/workflows/lint-action.yml
     needs:
       - prepare-workflow
     with:
       changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
+
+  nsis-tests:
+    name: NSIS Tests
+    uses: ./.github/workflows/nsis-tests.yml
+    needs:
+      - prepare-workflow
+    with:
+      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
 
   prepare-release:
     name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    runs-on: ubuntu-latest
+    runs-on:
+      - ubuntu-22.04
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
     needs:
       - prepare-workflow
     steps:
@@ -386,7 +379,7 @@ jobs:
 
   build-docs:
     name: Documentation
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-docs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
     needs:
       - prepare-workflow
       - build-source-tarball
@@ -397,11 +390,11 @@ jobs:
 
   build-source-tarball:
     name: Build Source Tarball
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-source-tarball'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
     needs:
       - prepare-workflow
       - prepare-release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -427,22 +420,22 @@ jobs:
           salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
 
   build-deps-onedir:
-    name: Build Dependencies Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    name: Build Onedir Dependencies
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
     needs:
       - prepare-workflow
     uses: ./.github/workflows/build-deps-onedir.yml
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-salt-onedir:
     name: Build Salt Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
     needs:
       - prepare-workflow
       - build-deps-onedir
@@ -451,14 +444,14 @@ jobs:
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-pkgs-onedir:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -466,26 +459,14 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
       source: "onedir"
-
-  build-pkgs-src:
-    name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-salt-onedir
-    uses: ./.github/workflows/build-packages.yml
-    with:
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
-      source: "src"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
   build-ci-deps:
     name: CI Deps
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -495,1537 +476,57 @@ jobs:
       nox-version: 2022.8.7
       python-version: "3.10"
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
-
-  rockylinux-8-pkg-tests:
-    name: Rocky Linux 8 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test-packages:
+    name: Test Package
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
     needs:
       - prepare-workflow
       - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-packages-action.yml
     with:
-      distro-slug: rockylinux-8
       nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
+      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}
       testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-8-arm64-pkg-tests:
-    name: Rocky Linux 8 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8-arm64') }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test:
+    name: Test Salt
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
     needs:
       - prepare-workflow
-      - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-action.yml
     with:
-      distro-slug: rockylinux-8-arm64
       nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-pkg-tests:
-    name: Rocky Linux 9 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
+      testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-arm64-pkg-tests:
-    name: Rocky Linux 9 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-9-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-pkg-tests:
-    name: Amazon Linux 2 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-arm64-pkg-tests:
-    name: Amazon Linux 2 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-pkg-tests:
-    name: Amazon Linux 2023 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-arm64-pkg-tests:
-    name: Amazon Linux 2023 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-pkg-tests:
-    name: Debian 11 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-arm64-pkg-tests:
-    name: Debian 11 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-pkg-tests:
-    name: Debian 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-arm64-pkg-tests:
-    name: Debian 12 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests:
-    name: Photon OS 4 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-arm64-pkg-tests:
-    name: Photon OS 4 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests-fips:
-    name: Photon OS 4 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-4-arm64-pkg-tests-fips:
-    name: Photon OS 4 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-pkg-tests:
-    name: Photon OS 5 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-arm64-pkg-tests:
-    name: Photon OS 5 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-pkg-tests-fips:
-    name: Photon OS 5 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-arm64-pkg-tests-fips:
-    name: Photon OS 5 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  ubuntu-2004-pkg-tests:
-    name: Ubuntu 20.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2004-arm64-pkg-tests:
-    name: Ubuntu 20.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-pkg-tests:
-    name: Ubuntu 22.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-arm64-pkg-tests:
-    name: Ubuntu 22.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-pkg-tests:
-    name: Ubuntu 24.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-24.04') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-arm64-pkg-tests:
-    name: Ubuntu 24.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-12-pkg-tests:
-    name: macOS 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-pkg-tests:
-    name: macOS 13 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-arm64-pkg-tests:
-    name: macOS 13 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-nsis-pkg-tests:
-    name: Windows 2016 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-msi-pkg-tests:
-    name: Windows 2016 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-nsis-pkg-tests:
-    name: Windows 2019 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-msi-pkg-tests:
-    name: Windows 2019 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-nsis-pkg-tests:
-    name: Windows 2022 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-msi-pkg-tests:
-    name: Windows 2022 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016:
-    name: Windows 2016 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
+      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}
       workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2019:
-    name: Windows 2019 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2022:
-    name: Windows 2022 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  macos-12:
-    name: macOS 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13:
-    name: macOS 13 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13-arm64:
-    name: macOS 13 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8:
-    name: Rocky Linux 8 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8-arm64:
-    name: Rocky Linux 8 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9:
-    name: Rocky Linux 9 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9-arm64:
-    name: Rocky Linux 9 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-9-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2:
-    name: Amazon Linux 2 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2-arm64:
-    name: Amazon Linux 2 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023:
-    name: Amazon Linux 2023 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023-arm64:
-    name: Amazon Linux 2023 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  archlinux-lts:
-    name: Arch Linux LTS Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: archlinux-lts
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11:
-    name: Debian 11 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11-arm64:
-    name: Debian 11 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12:
-    name: Debian 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12-arm64:
-    name: Debian 12 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  fedora-40:
-    name: Fedora 40 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'fedora-40') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: fedora-40
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  opensuse-15:
-    name: Opensuse 15 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'opensuse-15') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: opensuse-15
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4:
-    name: Photon OS 4 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-arm64:
-    name: Photon OS 4 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-fips:
-    name: Photon OS 4 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-4-arm64-fips:
-    name: Photon OS 4 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5:
-    name: Photon OS 5 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-arm64:
-    name: Photon OS 5 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-fips:
-    name: Photon OS 5 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5-arm64-fips:
-    name: Photon OS 5 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  ubuntu-2004:
-    name: Ubuntu 20.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2004-arm64:
-    name: Ubuntu 20.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204:
-    name: Ubuntu 22.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204-arm64:
-    name: Ubuntu 22.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04-arm64') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404:
-    name: Ubuntu 24.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-24.04') }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404-arm64:
-    name: Ubuntu 24.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
-      workflow-slug: ci
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
+      default-timeout: 180
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
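> The dozens of per-distro `test` jobs deleted above are collapsed into a single call that hands the whole matrix to a reusable workflow as a JSON string. That workflow (`test-action.yml`) is not part of this diff, so the following is only a sketch of how such an input is typically fanned out with `strategy.matrix.include`; the input name mirrors the call site, while the matrix keys and runner label are assumptions.

```yaml
# sketch only: one plausible shape for the receiving reusable workflow
on:
  workflow_call:
    inputs:
      matrix:
        type: string
        required: true

jobs:
  test:
    strategy:
      fail-fast: false
      # expand the JSON string passed by the caller into one job per entry
      matrix:
        include: ${{ fromJSON(inputs.matrix) }}
    runs-on: ${{ matrix.runner }}  # assumed key; the real key may differ
    steps:
      - run: echo "testing ${{ matrix.slug }} on ${{ matrix.arch }}"  # assumed keys
```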
 
   combine-all-code-coverage:
     name: Combine Code Coverage
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
-    runs-on: ubuntu-latest
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
+    runs-on: ubuntu-22.04
     env:
       PIP_INDEX_URL: https://pypi.org/simple
     needs:
       - prepare-workflow
       - build-ci-deps
-      - windows-2016
-      - windows-2019
-      - windows-2022
-      - macos-12
-      - macos-13
-      - macos-13-arm64
-      - rockylinux-8
-      - rockylinux-8-arm64
-      - rockylinux-9
-      - rockylinux-9-arm64
-      - amazonlinux-2
-      - amazonlinux-2-arm64
-      - amazonlinux-2023
-      - amazonlinux-2023-arm64
-      - archlinux-lts
-      - debian-11
-      - debian-11-arm64
-      - debian-12
-      - debian-12-arm64
-      - fedora-40
-      - opensuse-15
-      - photonos-4
-      - photonos-4-arm64
-      - photonos-4-fips
-      - photonos-4-arm64-fips
-      - photonos-5
-      - photonos-5-arm64
-      - photonos-5-fips
-      - photonos-5-arm64-fips
-      - ubuntu-2004
-      - ubuntu-2004-arm64
-      - ubuntu-2204
-      - ubuntu-2204-arm64
-      - ubuntu-2404
-      - ubuntu-2404-arm64
     steps:
       - uses: actions/checkout@v4
 
@@ -2059,8 +560,9 @@ jobs:
         id: get-coverage-reports
         uses: actions/download-artifact@v4
         with:
-          name: all-testrun-coverage-artifacts
           path: artifacts/coverage/
+          pattern: all-testrun-coverage-artifacts*
+          merge-multiple: true
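> With `actions/download-artifact@v4` a single `name:` fetches exactly one artifact, but coverage reports are now uploaded per test job. Switching to a glob `pattern:` plus `merge-multiple: true` downloads every matching artifact and flattens them into one directory:

```yaml
- uses: actions/download-artifact@v4
  with:
    path: artifacts/coverage/
    # fetch every per-job coverage artifact in one step
    pattern: all-testrun-coverage-artifacts*
    # flatten all of them into path/ instead of one sub-directory per artifact
    merge-multiple: true
```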
 
       - name: Display structure of downloaded files
         run: tree -a artifacts/
@@ -2113,6 +615,7 @@ jobs:
           path: artifacts/coverage/html/salt
           retention-days: 7
           if-no-files-found: error
+          include-hidden-files: true
 
       - name: Report Combined Code Coverage
         run: |
@@ -2129,6 +632,7 @@ jobs:
           path: artifacts/coverage/coverage.json
           retention-days: 7
           if-no-files-found: error
+          include-hidden-files: true
 
       - name: Create Combined Code Coverage HTML Report
         run: |
@@ -2141,3 +645,35 @@ jobs:
           path: artifacts/coverage/html/full
           retention-days: 7
           if-no-files-found: error
+          include-hidden-files: true
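> The three `include-hidden-files: true` additions matter because `actions/upload-artifact@v4` (since v4.4) excludes hidden files by default, and coverage data lives in dotfiles such as `.coverage`; without the flag these artifacts would silently upload empty. A standalone example (the artifact name here is illustrative):

```yaml
- uses: actions/upload-artifact@v4
  with:
    name: code-coverage            # illustrative name, not from this diff
    path: artifacts/coverage/
    if-no-files-found: error
    # coverage data files are dotfiles (.coverage*), which v4 skips by default
    include-hidden-files: true
```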
+
+  set-pipeline-exit-status:
+    # This job exists so GitHub branch protection can require a single check
+    # to pass on a pull request instead of requiring every individual job
+    name: Set the ${{ github.workflow }} Pipeline Exit Status
+    if: always()
+    runs-on: ubuntu-22.04
+    needs:
+      - prepare-workflow
+      - pre-commit
+      - lint
+      - nsis-tests
+      - build-docs
+      - build-deps-onedir
+      - build-salt-onedir
+      - combine-all-code-coverage
+      - build-ci-deps
+      - test-packages
+      - test
+    steps:
+      - name: Get workflow information
+        id: get-workflow-info
+        uses: im-open/workflow-conclusion@v2
+
+      - name: Set Pipeline Exit Status
+        shell: bash
+        run: |
+          if [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" != "success" ]; then
+            exit 1
+          else
+            exit 0
+          fi
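> `im-open/workflow-conclusion` aggregates the results of the jobs above into a single conclusion. A minimal equivalent gate can also be written with only the built-in `needs` context, assuming every job the status should cover is listed under this job's `needs:`:

```yaml
- name: Fail if any required job failed or was cancelled
  if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') }}
  run: exit 1
```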
diff --git a/.github/workflows/draft-release.yml b/.github/workflows/draft-release.yml
new file mode 100644
index 00000000000..c509e5cd1ea
--- /dev/null
+++ b/.github/workflows/draft-release.yml
@@ -0,0 +1,132 @@
+---
+name: Draft GitHub Release
+
+on:
+  workflow_call:
+    inputs:
+      salt-version:
+        type: string
+        required: true
+        description: The Salt version to set prior to building packages.
+      matrix:
+        required: true
+        type: string
+        description: JSON matrix config for the onedir artifacts
+      build-matrix:
+        required: true
+        type: string
+        description: JSON matrix config for the built packages
+
+env:
+  COLUMNS: 190
+  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+  PIP_DISABLE_PIP_VERSION_CHECK: "1"
+
+jobs:
+
+  list-artifacts:
+    name: List Artifacts
+    runs-on: ubuntu-22.04
+    steps:
+      # Download all build artifacts so we can inspect what will be released
+      - uses: actions/download-artifact@v4
+        with:
+          path: artifacts/
+      - name: List Directory Structure
+        run: ls -R artifacts/
+
+  create-github-release:
+    name: Draft Release v${{ inputs.salt-version }}
+    runs-on: ubuntu-22.04
+    outputs:
+      upload_url: ${{ steps.create_release.outputs.upload_url }}
+    steps:
+      - name: Create Release
+        id: create_release
+        uses: actions/create-release@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          release_name: "Release v${{ inputs.salt-version }}"
+          tag_name: v${{ inputs.salt-version }}
+          draft: true
+          prerelease: false
+      - name: Release Output
+        run: echo "upload_url=${{ steps.create_release.outputs.upload_url }}" >> "$GITHUB_OUTPUT"
+
+  upload-source-tarball:
+    needs:
+      - create-github-release
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}.tar.gz
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
+
+  upload-onedir:
+    needs:
+      - create-github-release
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.matrix) }}
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.${{ matrix.platform == 'windows' && 'zip' || 'tar.xz' }}
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
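> GitHub Actions expressions have no ternary operator, so the onedir artifact name uses the `cond && a || b` idiom, which returns `a` when the condition is true and `b` otherwise (reliable here because `'zip'` is truthy). For example:

```yaml
# with platform=linux this evaluates step by step:
#   ('linux' == 'windows') && 'zip' || 'tar.xz'
#   -> false || 'tar.xz'
#   -> 'tar.xz'
# giving e.g. salt-3007.1-onedir-linux-x86_64.tar.xz (version illustrative)
name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.${{ matrix.platform == 'windows' && 'zip' || 'tar.xz' }}
```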
+
+  upload-deb-packages:
+    needs:
+      - create-github-release
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.build-matrix)['linux'] }}
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
+      pattern: "*.deb"
+
+  upload-rpm-packages:
+    needs:
+      - create-github-release
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.build-matrix)['linux'] }}
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
+
+  upload-mac-packages:
+    needs:
+      - create-github-release
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.build-matrix)['macos'] }}
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
+
+  upload-windows-msi-packages:
+    needs:
+      - create-github-release
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.build-matrix)['windows'] }}
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
+
+  upload-windows-nsis-packages:
+    needs:
+      - create-github-release
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.build-matrix)['windows'] }}
+    uses: ./.github/workflows/release-artifact.yml
+    with:
+      name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS
+      upload_url: ${{ needs.create-github-release.outputs.upload_url }}
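> Every upload job above calls `release-artifact.yml`, which this diff does not include. From the call sites it must accept at least `name`, `upload_url`, and an optional `pattern`; a sketch of that inferred interface (the descriptions are assumptions):

```yaml
# assumed shape of .github/workflows/release-artifact.yml, inferred from callers
on:
  workflow_call:
    inputs:
      name:
        type: string
        required: true
        description: Artifact (and release asset) name
      upload_url:
        type: string
        required: true
        description: Upload URL of the draft GitHub release
      pattern:
        type: string
        required: false
        description: Optional glob selecting files inside the artifact
```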
diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml
index 3c3df9cfb89..09f614fbf36 100644
--- a/.github/workflows/lint-action.yml
+++ b/.github/workflows/lint-action.yml
@@ -18,17 +18,13 @@ env:
 jobs:
   Salt:
     name: Lint Salt's Source Code
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}
 
     container:
       image: ghcr.io/saltstack/salt-ci-containers/python:3.10
 
     steps:
-      - name: Install System Deps
-        run: |
-          apt-get update
-          apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
 
       - name: Add Git Safe Directory
         run: |
@@ -62,18 +58,13 @@ jobs:
 
   Tests:
     name: Lint Salt's Test Suite
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }}
 
     container:
       image: ghcr.io/saltstack/salt-ci-containers/python:3.10
 
     steps:
-      - name: Install System Deps
-        run: |
-          echo "deb http://deb.debian.org/debian bookworm-backports main" >> /etc/apt/sources.list
-          apt-get update
-          apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
 
       - name: Add Git Safe Directory
         run: |
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index af926fc01c0..072d652318e 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -22,7 +22,7 @@ on:
 
 env:
   COLUMNS: 190
-  CACHE_SEED: SEED-2  # Bump the number to invalidate all caches
+  CACHE_SEED: SEED-1  # Bump the number to invalidate all caches
   RELENV_DATA: "${{ github.workspace }}/.relenv"
   PIP_DISABLE_PIP_VERSION_CHECK: "1"
   RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@@ -40,7 +40,7 @@ jobs:
 
   workflow-requirements:
     name: Check Workflow Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     outputs:
       requirements-met: ${{ steps.check-requirements.outputs.requirements-met }}
     steps:
@@ -72,7 +72,7 @@ jobs:
   trigger-branch-nightly-builds:
     name: Trigger Branch Workflows
     if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     needs:
       - workflow-requirements
 
@@ -92,17 +92,13 @@ jobs:
 
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    environment: ci
     if: ${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
     needs:
       - workflow-requirements
     outputs:
-      jobs: ${{ steps.define-jobs.outputs.jobs }}
-      runners: ${{ steps.runner-types.outputs.runners }}
       changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
-      os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
-      pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
-      testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
@@ -110,6 +106,11 @@ jobs:
       release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
       testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
+    env:
+      LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
+      FULL_TESTRUN_SLUGS: ${{ vars.FULL_TESTRUN_SLUGS }}
+      PR_TESTRUN_SLUGS: ${{ vars.PR_TESTRUN_SLUGS }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -185,6 +186,9 @@ jobs:
                 - pkg/**
                 - *pkg_requirements
                 - *salt_added_modified
+            nsis_tests:
+              - added|modified: &nsis_tests
+                - pkg/windows/nsis/**
             testrun:
               - added|modified:
                 - *pkg_requirements
@@ -219,14 +223,6 @@ jobs:
           salt-version: ""
           validate-version: true
 
-      - name: Get Pull Request Test Labels
-        id: get-pull-labels
-        if: ${{ github.event_name == 'pull_request'}}
-        env:
-            GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          tools ci get-pr-test-labels --repository ${{ github.repository }}
-
       - name: Get Hash For Nox Tarball Cache
         id: nox-archive-hash
         run: |
@@ -251,16 +247,6 @@ jobs:
         run: |
           echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
 
-      - name: Define Runner Types
-        id: runner-types
-        run: |
-          tools ci runner-types ${{ github.event_name }}
-
-      - name: Define Jobs To Run
-        id: define-jobs
-        run: |
-          tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json
-
       - name: Get Salt Releases
         id: get-salt-releases
         env:
@@ -275,18 +261,18 @@ jobs:
         run: |
           tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
 
-      - name: Define Testrun
-        id: define-testrun
+      - name: Define workflow config
+        id: workflow-config
         run: |
-          tools ci define-testrun ${{ github.event_name }} changed-files.json
+          tools ci workflow-config${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
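> `tools ci workflow-config` replaces the old `runner-types` / `define-jobs` / `define-testrun` trio with one JSON blob exposed as the `config` output. The keys the workflows in this diff actually read are `jobs`, `testrun`, `test-matrix`, `pkg-test-matrix`, `build-matrix`, `linux_arm_runner`, and `skip_code_coverage`; a sketch of the shape, with illustrative values only:

```yaml
# illustrative only; keys assembled from the fromJSON(...) lookups in this diff
config:
  jobs: {lint: true, test: true, test-pkg: true, build-docs: true, build-pkgs: true}
  testrun: {type: changed, skip_code_coverage: true}
  skip_code_coverage: true
  test-matrix: [{slug: ubuntu-22.04, arch: x86_64}]        # entry shape assumed
  pkg-test-matrix: [{slug: debian-12, pkg_type: deb}]      # entry shape assumed
  build-matrix: {linux: [{arch: x86_64}], macos: [], windows: []}
  linux_arm_runner: some-arm64-runner-label                # value illustrative
```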
 
       - name: Check Contents of generated testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         run: |
           cat testrun-changed-files.txt || true
 
       - name: Upload testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         uses: actions/upload-artifact@v4
         with:
           name: testrun-changed-files.txt
@@ -300,7 +286,6 @@ jobs:
 
   pre-commit:
     name: Pre-Commit
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
     uses: ./.github/workflows/pre-commit-action.yml
     needs:
       - prepare-workflow
@@ -311,17 +296,25 @@ jobs:
 
   lint:
     name: Lint
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['lint'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
     uses: ./.github/workflows/lint-action.yml
     needs:
       - prepare-workflow
     with:
       changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
+  nsis-tests:
+    name: NSIS Tests
+    uses: ./.github/workflows/nsis-tests.yml
+    needs:
+      - prepare-workflow
+    with:
+      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
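> Unlike `lint`, the new `nsis-tests` job carries no `if:` at the call site; the `nsis_tests` path filter added to the changed-files step earlier in this diff suggests the gating happens inside `nsis-tests.yml` itself, presumably mirroring the lint gate:

```yaml
# assumed gate inside .github/workflows/nsis-tests.yml (not shown in this diff)
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['nsis_tests'] }}
```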
 
   prepare-release:
     name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    runs-on: ubuntu-latest
+    runs-on:
+      - ubuntu-22.04
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
     needs:
       - prepare-workflow
     steps:
@@ -443,7 +436,7 @@ jobs:
 
   build-docs:
     name: Documentation
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-docs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
     needs:
       - prepare-workflow
       - build-source-tarball
@@ -454,11 +447,11 @@ jobs:
 
   build-source-tarball:
     name: Build Source Tarball
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-source-tarball'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
     needs:
       - prepare-workflow
       - prepare-release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -484,22 +477,22 @@ jobs:
           salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
 
   build-deps-onedir:
-    name: Build Dependencies Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    name: Build Onedir Dependencies
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
     needs:
       - prepare-workflow
     uses: ./.github/workflows/build-deps-onedir.yml
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
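> `linux_arm_runner` is threaded into every reusable build and test workflow alongside the matrix. Its consumption is not shown in this diff; presumably it selects the `runs-on` label for arm64 matrix entries, along these lines:

```yaml
# sketch of one plausible use inside the called workflow; the fallback
# label and the matrix key are assumptions
runs-on: ${{ matrix.arch == 'arm64' && inputs.linux_arm_runner || 'ubuntu-22.04' }}
```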
 
   build-salt-onedir:
     name: Build Salt Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
     needs:
       - prepare-workflow
       - build-deps-onedir
@@ -508,14 +501,14 @@ jobs:
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-pkgs-onedir:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -523,17 +516,19 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
       source: "onedir"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
       environment: nightly
-      sign-macos-packages: true
+      sign-macos-packages: false
       sign-windows-packages: false
     secrets: inherit
 
   build-pkgs-src:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -541,16 +536,18 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
       source: "src"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
       environment: nightly
-      sign-macos-packages: true
+      sign-macos-packages: false
       sign-windows-packages: false
     secrets: inherit
   build-ci-deps:
     name: CI Deps
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -560,2479 +557,78 @@ jobs:
       nox-version: 2022.8.7
       python-version: "3.10"
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
-
-  rockylinux-8-pkg-tests:
-    name: Rocky Linux 8 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test-packages:
+    name: Test Package
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
     needs:
       - prepare-workflow
       - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-packages-action.yml
     with:
-      distro-slug: rockylinux-8
       nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
+      skip-code-coverage: true
       testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-8-arm64-pkg-tests:
-    name: Rocky Linux 8 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test:
+    name: Test Salt
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
     needs:
       - prepare-workflow
-      - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-action.yml
     with:
-      distro-slug: rockylinux-8-arm64
       nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-pkg-tests:
-    name: Rocky Linux 9 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
+      testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-arm64-pkg-tests:
-    name: Rocky Linux 9 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-pkg-tests:
-    name: Amazon Linux 2 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-arm64-pkg-tests:
-    name: Amazon Linux 2 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-pkg-tests:
-    name: Amazon Linux 2023 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-arm64-pkg-tests:
-    name: Amazon Linux 2023 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-pkg-tests:
-    name: Debian 11 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-arm64-pkg-tests:
-    name: Debian 11 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-pkg-tests:
-    name: Debian 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-arm64-pkg-tests:
-    name: Debian 12 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests:
-    name: Photon OS 4 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-arm64-pkg-tests:
-    name: Photon OS 4 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests-fips:
-    name: Photon OS 4 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-4-arm64-pkg-tests-fips:
-    name: Photon OS 4 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-pkg-tests:
-    name: Photon OS 5 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-arm64-pkg-tests:
-    name: Photon OS 5 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-pkg-tests-fips:
-    name: Photon OS 5 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-arm64-pkg-tests-fips:
-    name: Photon OS 5 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  ubuntu-2004-pkg-tests:
-    name: Ubuntu 20.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2004-arm64-pkg-tests:
-    name: Ubuntu 20.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-pkg-tests:
-    name: Ubuntu 22.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-arm64-pkg-tests:
-    name: Ubuntu 22.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-pkg-tests:
-    name: Ubuntu 24.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-arm64-pkg-tests:
-    name: Ubuntu 24.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-12-pkg-tests:
-    name: macOS 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-pkg-tests:
-    name: macOS 13 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-arm64-pkg-tests:
-    name: macOS 13 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-nsis-pkg-tests:
-    name: Windows 2016 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-msi-pkg-tests:
-    name: Windows 2016 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-nsis-pkg-tests:
-    name: Windows 2019 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-msi-pkg-tests:
-    name: Windows 2019 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-nsis-pkg-tests:
-    name: Windows 2022 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-msi-pkg-tests:
-    name: Windows 2022 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016:
-    name: Windows 2016 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
+      skip-code-coverage: true
       workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2019:
-    name: Windows 2019 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2022:
-    name: Windows 2022 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  macos-12:
-    name: macOS 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13:
-    name: macOS 13 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13-arm64:
-    name: macOS 13 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8:
-    name: Rocky Linux 8 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8-arm64:
-    name: Rocky Linux 8 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9:
-    name: Rocky Linux 9 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9-arm64:
-    name: Rocky Linux 9 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2:
-    name: Amazon Linux 2 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2-arm64:
-    name: Amazon Linux 2 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023:
-    name: Amazon Linux 2023 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023-arm64:
-    name: Amazon Linux 2023 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  archlinux-lts:
-    name: Arch Linux LTS Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: archlinux-lts
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11:
-    name: Debian 11 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11-arm64:
-    name: Debian 11 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12:
-    name: Debian 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12-arm64:
-    name: Debian 12 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  fedora-40:
-    name: Fedora 40 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: fedora-40
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  opensuse-15:
-    name: Opensuse 15 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: opensuse-15
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4:
-    name: Photon OS 4 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-arm64:
-    name: Photon OS 4 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-fips:
-    name: Photon OS 4 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-4-arm64-fips:
-    name: Photon OS 4 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5:
-    name: Photon OS 5 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-arm64:
-    name: Photon OS 5 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-fips:
-    name: Photon OS 5 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5-arm64-fips:
-    name: Photon OS 5 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  ubuntu-2004:
-    name: Ubuntu 20.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2004-arm64:
-    name: Ubuntu 20.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204:
-    name: Ubuntu 22.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204-arm64:
-    name: Ubuntu 22.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404:
-    name: Ubuntu 24.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404-arm64:
-    name: Ubuntu 24.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: nightly
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  combine-all-code-coverage:
-    name: Combine Code Coverage
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
-    runs-on: ubuntu-latest
-    env:
-      PIP_INDEX_URL: https://pypi.org/simple
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-      - windows-2016
-      - windows-2019
-      - windows-2022
-      - macos-12
-      - macos-13
-      - macos-13-arm64
-      - rockylinux-8
-      - rockylinux-8-arm64
-      - rockylinux-9
-      - rockylinux-9-arm64
-      - amazonlinux-2
-      - amazonlinux-2-arm64
-      - amazonlinux-2023
-      - amazonlinux-2023-arm64
-      - archlinux-lts
-      - debian-11
-      - debian-11-arm64
-      - debian-12
-      - debian-12-arm64
-      - fedora-40
-      - opensuse-15
-      - photonos-4
-      - photonos-4-arm64
-      - photonos-4-fips
-      - photonos-4-arm64-fips
-      - photonos-5
-      - photonos-5-arm64
-      - photonos-5-fips
-      - photonos-5-arm64-fips
-      - ubuntu-2004
-      - ubuntu-2004-arm64
-      - ubuntu-2204
-      - ubuntu-2204-arm64
-      - ubuntu-2404
-      - ubuntu-2404-arm64
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Python 3.10
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-
-      - name: Setup Python Tools Scripts
-        id: python-tools-scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==2022.8.7'
-
-      - name: Merge All Code Coverage Test Run Artifacts
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: all-testrun-coverage-artifacts
-          pattern: all-testrun-coverage-artifacts-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Get coverage reports
-        id: get-coverage-reports
-        uses: actions/download-artifact@v4
-        with:
-          name: all-testrun-coverage-artifacts
-          path: artifacts/coverage/
-
-      - name: Display structure of downloaded files
-        run: tree -a artifacts/
-
-      - name: Install Codecov CLI
-        run: |
-          # We can't yet use tokenless uploads with the codecov CLI
-          # python3 -m pip install codecov-cli
-          #
-          curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
-          curl -Os https://uploader.codecov.io/latest/linux/codecov
-          curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
-          curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
-          gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
-          shasum -a 256 -c codecov.SHA256SUM
-          chmod +x codecov
-          mv ./codecov /usr/local/bin/
-
-      - name: Create XML Coverage Reports
-        run: |
-          nox --force-color -e create-xml-coverage-reports
-
-      - name: Upload Code Coverage To Codecov
-        if: ${{ ! github.event.repository.private && ! github.event.repository.fork }}
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        run: |
-          tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
-
-      - name: Combine Code Coverage
-        run: |
-          nox --force-color -e combine-coverage
-
-      - name: Report Salt Code Coverage
-        run: |
-          nox --force-color -e coverage-report -- salt
-
-      - name: Create Salt Code Coverage HTML Report
-        run: |
-          nox --force-color -e create-html-coverage-report -- salt
-
-      - name: Upload Salt Code Coverage HTML Report
-        uses: actions/upload-artifact@v4
-        with:
-          name: code-coverage-salt-html-report
-          path: artifacts/coverage/html/salt
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Report Combined Code Coverage
-        run: |
-          nox --force-color -e coverage-report
-
-      - name: Create Combined Code Coverage JSON Report
-        run: |
-          nox --force-color -e create-json-coverage-reports
-
-      - name: Upload Combined Code Coverage JSON Report
-        uses: actions/upload-artifact@v4
-        with:
-          name: code-coverage-full-json-report
-          path: artifacts/coverage/coverage.json
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Create Combined Code Coverage HTML Report
-        run: |
-          nox --force-color -e create-html-coverage-report
-
-      - name: Upload Combined Code Coverage HTML Report
-        uses: actions/upload-artifact@v4
-        with:
-          name: code-coverage-full-html-report
-          path: artifacts/coverage/html/full
-          retention-days: 7
-          if-no-files-found: error
-
-  build-src-repo:
-    name: Build Repository
-    environment: nightly
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-source-tarball
-      - build-pkgs-src
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - src
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download Source Tarball
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Copy Files For Source Only Artifact Uploads
-        run: |
-          mkdir artifacts/src
-          find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
-
-      - name: Upload Standalone Repository As An Artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-src-repo
-          path: |
-            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
-            artifacts/src/*-GPG-*
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-src
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: src-repo
-
-  build-deb-repo:
-    name: Build Repository
-    environment: nightly
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - pkg-type: deb
-            distro: debian
-            version: "11"
-            arch: x86_64
-          - pkg-type: deb
-            distro: debian
-            version: "11"
-            arch: arm64
-          - pkg-type: deb
-            distro: debian
-            version: "12"
-            arch: x86_64
-          - pkg-type: deb
-            distro: debian
-            version: "12"
-            arch: arm64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "20.04"
-            arch: x86_64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "20.04"
-            arch: arm64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "22.04"
-            arch: x86_64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "22.04"
-            arch: arm64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "24.04"
-            arch: x86_64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "24.04"
-            arch: arm64
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Download System Dependencies
-        run: |
-          sudo apt update
-          sudo apt install -y devscripts apt-utils
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download DEB Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-deb
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-${{ matrix.pkg-type }}-${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
-
-  build-rpm-repo:
-    name: Build Repository
-    environment: nightly
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - pkg-type: rpm
-            distro: amazon
-            version: "2"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2"
-            arch: arm64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2023"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2023"
-            arch: arm64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2023"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: fedora
-            version: "40"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: fedora
-            version: "40"
-            arch: arm64
-          - pkg-type: rpm
-            distro: fedora
-            version: "40"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: photon
-            version: "4"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: photon
-            version: "4"
-            arch: arm64
-          - pkg-type: rpm
-            distro: photon
-            version: "4"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: photon
-            version: "5"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: photon
-            version: "5"
-            arch: arm64
-          - pkg-type: rpm
-            distro: photon
-            version: "5"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: redhat
-            version: "8"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: redhat
-            version: "8"
-            arch: arm64
-          - pkg-type: rpm
-            distro: redhat
-            version: "8"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: redhat
-            version: "9"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: redhat
-            version: "9"
-            arch: arm64
-          - pkg-type: rpm
-            distro: redhat
-            version: "9"
-            arch: aarch64
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Download System Dependencies
-        run: |
-          sudo apt update
-          sudo apt install -y rpm
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download RPM Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}-rpm
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        env:
-          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
-          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
-        run: |
-          tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-${{ matrix.pkg-type }}-${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
-
-  build-windows-repo:
-    name: Build Repository
-    environment: nightly
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - windows
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download Windows NSIS x86 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86-NSIS
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows MSI x86 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86-MSI
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows NSIS amd64 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-amd64-NSIS
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows MSI amd64 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-amd64-MSI
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-windows
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: windows-repo
-
-  build-macos-repo:
-    name: Build Repository
-    environment: nightly
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - macos
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download macOS x86_64 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86_64-macos
-          path: artifacts/pkgs/incoming
-
-      - name: Download macOS Arch64 Packages
-        if: ${{ ! github.event.repository.fork }}
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-arm64-macos
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-macos
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: macos-repo
-
-  build-onedir-repo:
-    name: Build Repository
-    environment: nightly
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-salt-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - onedir
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download Linux x86_64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-linux-x86_64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Linux arm64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-linux-arm64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download macOS x86_64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-x86_64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download macOS arm64 Onedir Archive
-        if: ${{ ! github.event.repository.fork }}
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-arm64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows amd64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-amd64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows amd64 Onedir Archive(zip)
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-amd64.zip
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows x86 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-x86.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows amd64 Onedir Archive(zip)
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-x86.zip
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-onedir
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: onedir-repo
-
-  publish-repositories:
-    name: Publish Repositories
-    if: ${{ always() && ! failure() && ! cancelled() }}
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-nightly
+      default-timeout: 360
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  set-pipeline-exit-status:
+    # This job exists so GitHub branch protection can require this single
+    # check to pass on pull requests, instead of requiring every job
+    name: Set the ${{ github.workflow }} Pipeline Exit Status
+    if: always()
+    runs-on: ubuntu-22.04
     environment: nightly
     needs:
+      - workflow-requirements
+      - trigger-branch-nightly-builds
       - prepare-workflow
+      - pre-commit
+      - lint
+      - nsis-tests
       - build-docs
-      - build-src-repo
-      - build-deb-repo
-      - build-rpm-repo
-      - build-windows-repo
-      - build-macos-repo
-      - build-onedir-repo
+      - build-deps-onedir
+      - build-salt-onedir
+      - build-pkgs-src
       - build-ci-deps
-      - windows-2016
-      - windows-2019
-      - windows-2022
-      - macos-12
-      - macos-13
-      - macos-13-arm64
-      - rockylinux-8
-      - rockylinux-8-arm64
-      - rockylinux-9
-      - rockylinux-9-arm64
-      - amazonlinux-2
-      - amazonlinux-2-arm64
-      - amazonlinux-2023
-      - amazonlinux-2023-arm64
-      - archlinux-lts
-      - debian-11
-      - debian-11-arm64
-      - debian-12
-      - debian-12-arm64
-      - fedora-40
-      - opensuse-15
-      - photonos-4
-      - photonos-4-arm64
-      - photonos-4-fips
-      - photonos-4-arm64-fips
-      - photonos-5
-      - photonos-5-arm64
-      - photonos-5-fips
-      - photonos-5-arm64-fips
-      - ubuntu-2004
-      - ubuntu-2004-arm64
-      - ubuntu-2204
-      - ubuntu-2204-arm64
-      - ubuntu-2404
-      - ubuntu-2404-arm64
-
+      - test-packages
+      - test
     steps:
+      - name: Get workflow information
+        id: get-workflow-info
+        uses: im-open/workflow-conclusion@v2
 
-      - uses: actions/checkout@v4
-
-      - name: Get Salt Project GitHub Actions Bot Environment
+      - name: Set Pipeline Exit Status
+        shell: bash
         run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Download Repository Artifact
-        uses: actions/download-artifact@v4
-        with:
-          pattern: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-repo-*
-          merge-multiple: true
-          path: repo/
-
-      - name: Decompress Repository Artifacts
-        run: |
-          find repo/ -type f -name '*.tar.gz' -print -exec tar xvf {} \;
-          find repo/ -type f -name '*.tar.gz' -print -exec rm -f {} \;
-
-      - name: Show Repository
-        run: |
-          tree -a artifacts/pkgs/repo/
-
-      - name: Upload Repository Contents (nightly)
-        env:
-          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
-          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
-        run: |
-          tools pkg repo publish nightly --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/
+          if [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" != "success" ]; then
+            exit 1
+          else
+            exit 0
+          fi
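
The per-distro repository jobs removed above are replaced by matrix-driven jobs that all read a single `config` JSON blob emitted by the `tools ci workflow-config` step. A minimal sketch of the shape the `fromJSON(...)` lookups in this diff assume — only the top-level keys shown here are actually dereferenced; the per-entry fields inside the matrix lists are hypothetical:

```json
{
  "jobs": { "lint": true, "build-docs": true, "build-pkgs": true, "test": true, "test-pkg": true },
  "testrun": { "type": "changed" },
  "build-matrix": [ { "arch": "x86_64" } ],
  "test-matrix": [ { "distro-slug": "ubuntu-22.04", "arch": "x86_64" } ],
  "pkg-test-matrix": [ { "distro": "ubuntu", "version": "22.04", "arch": "x86_64", "pkg-type": "deb" } ],
  "linux_arm_runner": "ubuntu-24.04-arm"
}
```

Each workflow re-serializes just the list it needs with `toJSON(...)` before passing it to a reusable workflow's `matrix` input.
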
diff --git a/.github/workflows/nsis-tests.yml b/.github/workflows/nsis-tests.yml
new file mode 100644
index 00000000000..e80ed43f409
--- /dev/null
+++ b/.github/workflows/nsis-tests.yml
@@ -0,0 +1,67 @@
+---
+name: Test NSIS Installer
+
+on:
+  workflow_call:
+    inputs:
+      changed-files:
+        required: true
+        type: string
+        description: JSON string containing information about changed files
+
+jobs:
+  Test-NSIS-Logic:
+    name: Logic Tests
+    runs-on:
+      - windows-latest
+    if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['nsis_tests'] }}
+
+    steps:
+
+    - name: Checkout Salt
+      uses: actions/checkout@v4
+
+    - name: Set Up Python 3.10
+      uses: actions/setup-python@v5
+      with:
+        python-version: "3.10"
+
+    - name: Install NSIS
+      run: .\pkg\windows\install_nsis.cmd -CICD
+      shell: cmd
+
+    - name: Build Test Installer
+      run: .\pkg\windows\nsis\tests\setup.cmd -CICD
+      shell: cmd
+
+    - name: Run Config Tests
+      run: .\pkg\windows\nsis\tests\test.cmd -CICD .\config_tests
+      shell: cmd
+
+  Test-NSIS-Stress:
+    name: Stress Tests
+    runs-on:
+      - windows-latest
+    if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['nsis_tests'] }}
+
+    steps:
+
+      - name: Checkout Salt
+        uses: actions/checkout@v4
+
+      - name: Set Up Python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+
+      - name: Install NSIS
+        run: .\pkg\windows\install_nsis.cmd -CICD
+        shell: cmd
+
+      - name: Build Test Installer
+        run: .\pkg\windows\nsis\tests\setup.cmd -CICD
+        shell: cmd
+
+      - name: Run Stress Test
+        run: .\pkg\windows\nsis\tests\test.cmd -CICD .\stress_tests
+        shell: cmd
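
Both NSIS jobs share the same gate: they always run for `push`, `schedule`, and `workflow_dispatch` events, and on pull requests only when the `nsis_tests` key of the `changed-files` input is truthy. A minimal sketch of the JSON this assumes, matching the `nsis_tests` filter added to `prepare-workflow` further down (other keys elided):

```json
{
  "nsis_tests": true
}
```
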
diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml
index ac8ff64a10e..26c119aa69a 100644
--- a/.github/workflows/pre-commit-action.yml
+++ b/.github/workflows/pre-commit-action.yml
@@ -21,21 +21,16 @@ jobs:
   Pre-Commit:
     name: Run Pre-Commit Against Salt
 
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
 
     container:
-      image: ghcr.io/saltstack/salt-ci-containers/python:3.10
+      image: ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04
 
     env:
       PRE_COMMIT_COLOR: always
 
     steps:
 
-      - name: Install System Deps
-        run: |
-          apt-get update
-          apt-get install -y wget curl enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev rustc
-
       - name: Add Git Safe Directory
         run: |
           git config --global --add safe.directory "$(pwd)"
diff --git a/.github/workflows/release-artifact.yml b/.github/workflows/release-artifact.yml
new file mode 100644
index 00000000000..35a9854046b
--- /dev/null
+++ b/.github/workflows/release-artifact.yml
@@ -0,0 +1,69 @@
+---
+name: Upload Release Artifact
+
+on:
+  workflow_call:
+    inputs:
+      name:
+        type: string
+        required: true
+        description: Name of the artifact containing the files to upload.
+      upload_url:
+        type: string
+        required: true
+        description: The release's upload URL.
+      pattern:
+        type: string
+        required: false
+        description: Pattern of files to upload
+
+
+jobs:
+
+  list-files:
+    name: List ${{ inputs.name }}
+    runs-on: ubuntu-22.04
+    outputs:
+      files: ${{ steps.list-files.outputs.files }}
+    steps:
+      - uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.name }}
+          path: artifacts
+      - run: find artifacts -maxdepth 1 -type f -printf '%f\n'
+      - id: list-files
+        run: |
+          if [ "${{ inputs.pattern }}" != "" ]; then
+            echo files="$(find artifacts -maxdepth 1 -type f -name '${{ inputs.pattern }}' -printf '%f\n' | jq -Rnc '[inputs | { file: "\(.)" }]')" >> "$GITHUB_OUTPUT"
+          else
+            echo files="$(find artifacts -maxdepth 1 -type f -printf '%f\n' | jq -Rnc '[inputs | { file: "\(.)" }]')" >> "$GITHUB_OUTPUT"
+          fi
+
+  upload-files:
+    name: Upload ${{ matrix.file }} from ${{ inputs.name }}
+    runs-on: ubuntu-22.04
+    needs:
+      - list-files
+    strategy:
+      matrix:
+        include: ${{ fromJSON(needs.list-files.outputs.files) }}
+    steps:
+      - uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.name }}
+          path: artifacts
+
+      - name: Detect type of ${{ matrix.file }}
+        id: file-type
+        run: echo "file_type=$( file --mime-type artifacts/${{ matrix.file }} )" >> "$GITHUB_OUTPUT"
+
+      - name: Upload ${{ matrix.file }}
+        id: upload-release-asset-source
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ inputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing its ID to get its outputs object, which includes an `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
+          asset_path: artifacts/${{ matrix.file }}
+          asset_name: ${{ matrix.file }}
+          asset_content_type: ${{ steps.file-type.outputs.file_type }}
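
A hedged sketch of how a caller might use this reusable workflow; the job names and values are hypothetical, but the three inputs are the ones declared above:

```yaml
  upload-source-tarball:
    needs: create-release  # hypothetical job that created the GitHub release
    uses: ./.github/workflows/release-artifact.yml
    with:
      name: Source Tarball                                       # artifact to download
      upload_url: ${{ needs.create-release.outputs.upload_url }}
      pattern: "salt-*.tar.gz"
```

`list-files` renders the artifact's contents as `[{"file": "..."}, ...]` via `jq -Rnc`, and `upload-files` feeds that list to `strategy.matrix.include`, fanning out one upload job per file.
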
diff --git a/.github/workflows/release-tag.yml b/.github/workflows/release-tag.yml
index bc4624ef086..7d88b2d63f6 100644
--- a/.github/workflows/release-tag.yml
+++ b/.github/workflows/release-tag.yml
@@ -32,7 +32,7 @@ jobs:
     permissions:
       contents: write  # for dev-drprasad/delete-tag-and-release to delete tags or releases
     name: Generate Tag and Github Release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
     - uses: dev-drprasad/delete-tag-and-release@v0.2.0
       if: github.event.inputs.reTag == 'true'
diff --git a/.github/workflows/release-update-winrepo.yml b/.github/workflows/release-update-winrepo.yml
index ab32d73d3c7..f6d47681a90 100644
--- a/.github/workflows/release-update-winrepo.yml
+++ b/.github/workflows/release-update-winrepo.yml
@@ -19,7 +19,7 @@ permissions:
 jobs:
   update-winrepo:
     name: Update Winrepo
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
 
     - name: Checkout Salt
diff --git a/.github/workflows/release-upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml
index 431ea00039a..dc760371bb2 100644
--- a/.github/workflows/release-upload-virustotal.yml
+++ b/.github/workflows/release-upload-virustotal.yml
@@ -31,7 +31,6 @@ jobs:
     runs-on:
       - self-hosted
       - linux
-      - repo-release
     steps:
 
     - name: Checkout Salt
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 070cc58314d..794ecb486db 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -21,7 +21,7 @@ on:
 
 env:
   COLUMNS: 190
-  CACHE_SEED: SEED-2  # Bump the number to invalidate all caches
+  CACHE_SEED: SEED-1  # Bump the number to invalidate all caches
   RELENV_DATA: "${{ github.workspace }}/.relenv"
   PIP_DISABLE_PIP_VERSION_CHECK: "1"
   RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@@ -37,7 +37,7 @@ jobs:
 
   check-requirements:
     name: Check Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     environment: release-check
     steps:
       - name: Check For Admin Permission
@@ -49,11 +49,9 @@ jobs:
   prepare-workflow:
     name: Prepare Workflow Run
     runs-on:
-      - self-hosted
-      - linux
-      - repo-release
+      - linux-x86_64
     env:
-      USE_S3_CACHE: 'true'
+      USE_S3_CACHE: 'false'
     environment: release
     needs:
       - check-requirements
@@ -63,6 +61,7 @@ jobs:
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
       releases: ${{ steps.get-salt-releases.outputs.releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -121,12 +120,15 @@ jobs:
         run: |
           echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
 
+      - name: Define workflow config
+        id: workflow-config
+        run: |
+          tools ci workflow-config${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
+
   download-onedir-artifact:
     name: Download Staging Onedir Artifact
     runs-on:
-      - self-hosted
-      - linux
-      - repo-release
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     environment: release
@@ -184,15 +186,15 @@ jobs:
       nox-version: 2022.8.7
       python-version: "3.10"
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   backup:
     name: Backup
     runs-on:
-      - self-hosted
-      - linux
-      - repo-release
+      - linux-x86_64
     needs:
       - prepare-workflow
     env:
@@ -223,9 +225,7 @@ jobs:
   publish-repositories:
     name: Publish Repositories
     runs-on:
-      - self-hosted
-      - linux
-      - repo-release
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     needs:
@@ -255,40 +255,17 @@ jobs:
         run: |
           tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
 
-  pkg-download-tests:
-    name: Package Downloads
-    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
-    needs:
-      - prepare-workflow
-      - publish-repositories
-      - build-ci-deps
-      - download-onedir-artifact
-    uses: ./.github/workflows/test-package-downloads-action.yml
-    with:
-      nox-session: ci-test-onedir
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      environment: release
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      skip-code-coverage: true
-      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
-    secrets: inherit
-
   release:
     name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
     if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
-      - self-hosted
-      - linux
-      - repo-release
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     needs:
       - prepare-workflow
       - backup
       - publish-repositories
-      - pkg-download-tests
     environment: release
     steps:
       - name: Clone The Salt Repository
@@ -395,9 +372,7 @@ jobs:
       - release
     environment: release
     runs-on:
-      - self-hosted
-      - linux
-      - repo-release
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     steps:
@@ -443,3 +418,38 @@ jobs:
           TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}"
         run: |
           tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
+  set-pipeline-exit-status:
+    # This job exists so GitHub branch protection can require this single
+    # check to pass on pull requests, instead of requiring every job
+    name: Set the ${{ github.workflow }} Pipeline Exit Status
+    if: always()
+    runs-on: ubuntu-22.04
+    needs:
+      - check-requirements
+      - prepare-workflow
+      - publish-repositories
+      - release
+      - publish-pypi
+      - build-ci-deps
+    steps:
+      - name: Get workflow information
+        id: get-workflow-info
+        uses: im-open/workflow-conclusion@v2
+
+      - run: |
+          # shellcheck disable=SC2129
+          if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
+            echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}"
+            echo '```' >> "${GITHUB_STEP_SUMMARY}"
+            echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}"
+            echo '```' >> "${GITHUB_STEP_SUMMARY}"
+          fi
+
+      - name: Set Pipeline Exit Status
+        shell: bash
+        run: |
+          if [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" != "success" ]; then
+            exit 1
+          else
+            exit 0
+          fi
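
The `# shellcheck disable=SC2129` comment above suppresses ShellCheck's suggestion to group consecutive redirects to the same file. For reference, the grouped form it would otherwise recommend looks like this sketch:

````bash
{
  echo 'To restore the release bucket run:'
  echo '```'
  echo 'tools pkg repo restore-previous-releases'
  echo '```'
} >> "${GITHUB_STEP_SUMMARY}"
````

A single redirect opens the summary file once instead of four times; the output written is identical.
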
diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml
index 46c04fda055..135e7a3f995 100644
--- a/.github/workflows/scheduled.yml
+++ b/.github/workflows/scheduled.yml
@@ -12,7 +12,7 @@ on:
 
 env:
   COLUMNS: 190
-  CACHE_SEED: SEED-2  # Bump the number to invalidate all caches
+  CACHE_SEED: SEED-1  # Bump the number to invalidate all caches
   RELENV_DATA: "${{ github.workspace }}/.relenv"
   PIP_DISABLE_PIP_VERSION_CHECK: "1"
   RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@@ -30,7 +30,7 @@ jobs:
 
   workflow-requirements:
     name: Check Workflow Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     outputs:
       requirements-met: ${{ steps.check-requirements.outputs.requirements-met }}
     steps:
@@ -62,7 +62,7 @@ jobs:
   trigger-branch-scheduled-builds:
     name: Trigger Branch Workflows
     if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     needs:
       - workflow-requirements
 
@@ -82,17 +82,13 @@ jobs:
 
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    environment: ci
     if: ${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
     needs:
       - workflow-requirements
     outputs:
-      jobs: ${{ steps.define-jobs.outputs.jobs }}
-      runners: ${{ steps.runner-types.outputs.runners }}
       changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
-      os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
-      pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
-      testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
@@ -100,6 +96,11 @@ jobs:
       release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
       testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
+    env:
+      LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
+      FULL_TESTRUN_SLUGS: ${{ vars.FULL_TESTRUN_SLUGS }}
+      PR_TESTRUN_SLUGS: ${{ vars.PR_TESTRUN_SLUGS }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -175,6 +176,9 @@ jobs:
                 - pkg/**
                 - *pkg_requirements
                 - *salt_added_modified
+            nsis_tests:
+              - added|modified: &nsis_tests
+                - pkg/windows/nsis/**
             testrun:
               - added|modified:
                 - *pkg_requirements
@@ -209,14 +213,6 @@ jobs:
           salt-version: ""
           validate-version: true
 
-      - name: Get Pull Request Test Labels
-        id: get-pull-labels
-        if: ${{ github.event_name == 'pull_request'}}
-        env:
-            GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          tools ci get-pr-test-labels --repository ${{ github.repository }}
-
       - name: Get Hash For Nox Tarball Cache
         id: nox-archive-hash
         run: |
@@ -241,16 +237,6 @@ jobs:
         run: |
           echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
 
-      - name: Define Runner Types
-        id: runner-types
-        run: |
-          tools ci runner-types ${{ github.event_name }}
-
-      - name: Define Jobs To Run
-        id: define-jobs
-        run: |
-          tools ci define-jobs ${{ github.event_name }} changed-files.json
-
       - name: Get Salt Releases
         id: get-salt-releases
         env:
@@ -265,18 +251,18 @@ jobs:
         run: |
           tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
 
-      - name: Define Testrun
-        id: define-testrun
+      - name: Define workflow config
+        id: workflow-config
         run: |
-          tools ci define-testrun ${{ github.event_name }} changed-files.json
+          tools ci workflow-config ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
 
       - name: Check Contents of generated testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         run: |
           cat testrun-changed-files.txt || true
 
       - name: Upload testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         uses: actions/upload-artifact@v4
         with:
           name: testrun-changed-files.txt
@@ -290,7 +276,6 @@ jobs:
 
   pre-commit:
     name: Pre-Commit
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
     uses: ./.github/workflows/pre-commit-action.yml
     needs:
       - prepare-workflow
@@ -301,17 +286,25 @@ jobs:
 
   lint:
     name: Lint
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['lint'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
     uses: ./.github/workflows/lint-action.yml
     needs:
       - prepare-workflow
     with:
       changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
+  nsis-tests:
+    name: NSIS Tests
+    uses: ./.github/workflows/nsis-tests.yml
+    needs:
+      - prepare-workflow
+    with:
+      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
 
   prepare-release:
     name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    runs-on: ubuntu-latest
+    runs-on:
+      - ubuntu-22.04
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
     needs:
       - prepare-workflow
     steps:
@@ -433,7 +426,7 @@ jobs:
 
   build-docs:
     name: Documentation
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-docs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
     needs:
       - prepare-workflow
       - build-source-tarball
@@ -444,11 +437,11 @@ jobs:
 
   build-source-tarball:
     name: Build Source Tarball
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-source-tarball'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
     needs:
       - prepare-workflow
       - prepare-release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -474,22 +467,22 @@ jobs:
           salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
 
   build-deps-onedir:
-    name: Build Dependencies Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    name: Build Onedir Dependencies
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
     needs:
       - prepare-workflow
     uses: ./.github/workflows/build-deps-onedir.yml
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-salt-onedir:
     name: Build Salt Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
     needs:
       - prepare-workflow
       - build-deps-onedir
@@ -498,14 +491,14 @@ jobs:
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-pkgs-onedir:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -513,26 +506,14 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
       source: "onedir"
-
-  build-pkgs-src:
-    name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-salt-onedir
-    uses: ./.github/workflows/build-packages.yml
-    with:
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
-      source: "src"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
   build-ci-deps:
     name: CI Deps
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -542,1649 +523,76 @@ jobs:
       nox-version: 2022.8.7
       python-version: "3.10"
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
-
-  rockylinux-8-pkg-tests:
-    name: Rocky Linux 8 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test-packages:
+    name: Test Package
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
     needs:
       - prepare-workflow
       - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-packages-action.yml
     with:
-      distro-slug: rockylinux-8
       nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
+      skip-code-coverage: true
       testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-8-arm64-pkg-tests:
-    name: Rocky Linux 8 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test:
+    name: Test Salt
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
     needs:
       - prepare-workflow
-      - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-action.yml
     with:
-      distro-slug: rockylinux-8-arm64
       nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-pkg-tests:
-    name: Rocky Linux 9 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
+      testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-arm64-pkg-tests:
-    name: Rocky Linux 9 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-pkg-tests:
-    name: Amazon Linux 2 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-arm64-pkg-tests:
-    name: Amazon Linux 2 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-pkg-tests:
-    name: Amazon Linux 2023 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-arm64-pkg-tests:
-    name: Amazon Linux 2023 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-pkg-tests:
-    name: Debian 11 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-arm64-pkg-tests:
-    name: Debian 11 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-pkg-tests:
-    name: Debian 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-arm64-pkg-tests:
-    name: Debian 12 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests:
-    name: Photon OS 4 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-arm64-pkg-tests:
-    name: Photon OS 4 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests-fips:
-    name: Photon OS 4 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-4-arm64-pkg-tests-fips:
-    name: Photon OS 4 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-pkg-tests:
-    name: Photon OS 5 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-arm64-pkg-tests:
-    name: Photon OS 5 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-pkg-tests-fips:
-    name: Photon OS 5 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-arm64-pkg-tests-fips:
-    name: Photon OS 5 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  ubuntu-2004-pkg-tests:
-    name: Ubuntu 20.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2004-arm64-pkg-tests:
-    name: Ubuntu 20.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-pkg-tests:
-    name: Ubuntu 22.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-arm64-pkg-tests:
-    name: Ubuntu 22.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-pkg-tests:
-    name: Ubuntu 24.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-arm64-pkg-tests:
-    name: Ubuntu 24.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-12-pkg-tests:
-    name: macOS 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-pkg-tests:
-    name: macOS 13 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-arm64-pkg-tests:
-    name: macOS 13 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-nsis-pkg-tests:
-    name: Windows 2016 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-msi-pkg-tests:
-    name: Windows 2016 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-nsis-pkg-tests:
-    name: Windows 2019 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-msi-pkg-tests:
-    name: Windows 2019 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-nsis-pkg-tests:
-    name: Windows 2022 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-msi-pkg-tests:
-    name: Windows 2022 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016:
-    name: Windows 2016 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
+      skip-code-coverage: true
       workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2019:
-    name: Windows 2019 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      default-timeout: 360
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
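+      # NOTE: judging from the surrounding change, the per-distro test jobs
+      # removed in this patch are replaced by this single matrix, which is
+      # computed in prepare-workflow's 'config' output rather than hard-coded.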
+  set-pipeline-exit-status:
+    # This job exists so that GitHub can be configured to require a single
+    # check to pass on a pull request, instead of requiring every job.
+    name: Set the ${{ github.workflow }} Pipeline Exit Status
+    if: always()
+    runs-on: ubuntu-22.04
     needs:
+      - workflow-requirements
+      - trigger-branch-scheduled-builds
       - prepare-workflow
+      - pre-commit
+      - lint
+      - nsis-tests
+      - build-docs
+      - build-deps-onedir
+      - build-salt-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2022:
-    name: Windows 2022 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  macos-12:
-    name: macOS 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13:
-    name: macOS 13 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13-arm64:
-    name: macOS 13 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8:
-    name: Rocky Linux 8 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8-arm64:
-    name: Rocky Linux 8 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9:
-    name: Rocky Linux 9 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9-arm64:
-    name: Rocky Linux 9 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2:
-    name: Amazon Linux 2 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2-arm64:
-    name: Amazon Linux 2 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023:
-    name: Amazon Linux 2023 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023-arm64:
-    name: Amazon Linux 2023 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  archlinux-lts:
-    name: Arch Linux LTS Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: archlinux-lts
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11:
-    name: Debian 11 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11-arm64:
-    name: Debian 11 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12:
-    name: Debian 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12-arm64:
-    name: Debian 12 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  fedora-40:
-    name: Fedora 40 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: fedora-40
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  opensuse-15:
-    name: Opensuse 15 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: opensuse-15
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4:
-    name: Photon OS 4 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-arm64:
-    name: Photon OS 4 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-fips:
-    name: Photon OS 4 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-4-arm64-fips:
-    name: Photon OS 4 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5:
-    name: Photon OS 5 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-arm64:
-    name: Photon OS 5 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-fips:
-    name: Photon OS 5 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5-arm64-fips:
-    name: Photon OS 5 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  ubuntu-2004:
-    name: Ubuntu 20.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2004-arm64:
-    name: Ubuntu 20.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204:
-    name: Ubuntu 22.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204-arm64:
-    name: Ubuntu 22.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404:
-    name: Ubuntu 24.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404-arm64:
-    name: Ubuntu 24.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: false
-      workflow-slug: scheduled
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  combine-all-code-coverage:
-    name: Combine Code Coverage
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
-    runs-on: ubuntu-latest
-    env:
-      PIP_INDEX_URL: https://pypi.org/simple
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-      - windows-2016
-      - windows-2019
-      - windows-2022
-      - macos-12
-      - macos-13
-      - macos-13-arm64
-      - rockylinux-8
-      - rockylinux-8-arm64
-      - rockylinux-9
-      - rockylinux-9-arm64
-      - amazonlinux-2
-      - amazonlinux-2-arm64
-      - amazonlinux-2023
-      - amazonlinux-2023-arm64
-      - archlinux-lts
-      - debian-11
-      - debian-11-arm64
-      - debian-12
-      - debian-12-arm64
-      - fedora-40
-      - opensuse-15
-      - photonos-4
-      - photonos-4-arm64
-      - photonos-4-fips
-      - photonos-4-arm64-fips
-      - photonos-5
-      - photonos-5-arm64
-      - photonos-5-fips
-      - photonos-5-arm64-fips
-      - ubuntu-2004
-      - ubuntu-2004-arm64
-      - ubuntu-2204
-      - ubuntu-2204-arm64
-      - ubuntu-2404
-      - ubuntu-2404-arm64
+      - test-packages
+      - test
     steps:
-      - uses: actions/checkout@v4
+      - name: Get workflow information
+        id: get-workflow-info
+        uses: im-open/workflow-conclusion@v2
 
-      - name: Set up Python 3.10
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-
-      - name: Setup Python Tools Scripts
-        id: python-tools-scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage
-
-      - name: Install Nox
+      - name: Set Pipeline Exit Status
+        shell: bash
         run: |
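+          # Fail this required check unless the aggregate conclusion reported
+          # by the im-open/workflow-conclusion step above is "success".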
-          python3 -m pip install 'nox==2022.8.7'
-
-
-
-      - name: Merge All Code Coverage Test Run Artifacts
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: all-testrun-coverage-artifacts
-          pattern: all-testrun-coverage-artifacts-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Get coverage reports
-        id: get-coverage-reports
-        uses: actions/download-artifact@v4
-        with:
-          name: all-testrun-coverage-artifacts
-          path: artifacts/coverage/
-
-      - name: Display structure of downloaded files
-        run: tree -a artifacts/
-
-      - name: Install Codecov CLI
-        run: |
-          # We can't yet use tokenless uploads with the codecov CLI
-          # python3 -m pip install codecov-cli
-          #
-          curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
-          curl -Os https://uploader.codecov.io/latest/linux/codecov
-          curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
-          curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
-          gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
-          shasum -a 256 -c codecov.SHA256SUM
-          chmod +x codecov
-          mv ./codecov /usr/local/bin/
-
-      - name: Create XML Coverage Reports
-        run: |
-          nox --force-color -e create-xml-coverage-reports
-
-      - name: Upload Code Coverage To Codecov
-        if: ${{ ! github.event.repository.private && ! github.event.repository.fork }}
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        run: |
-          tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
-
-      - name: Combine Code Coverage
-        run: |
-          nox --force-color -e combine-coverage
-
-      - name: Report Salt Code Coverage
-        run: |
-          nox --force-color -e coverage-report -- salt
-
-      - name: Create Salt Code Coverage HTML Report
-        run: |
-          nox --force-color -e create-html-coverage-report -- salt
-
-      - name: Upload Salt Code Coverage HTML Report
-        uses: actions/upload-artifact@v4
-        with:
-          name: code-coverage-salt-html-report
-          path: artifacts/coverage/html/salt
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Report Combined Code Coverage
-        run: |
-          nox --force-color -e coverage-report
-
-      - name: Create Combined Code Coverage JSON Report
-        run: |
-          nox --force-color -e create-json-coverage-reports
-
-      - name: Upload Combined Code Coverage JSON Report
-        uses: actions/upload-artifact@v4
-        with:
-          name: code-coverage-full-json-report
-          path: artifacts/coverage/coverage.json
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Create Combined Code Coverage HTML Report
-        run: |
-          nox --force-color -e create-html-coverage-report
-
-      - name: Upload Combined Code Coverage HTML Report
-        uses: actions/upload-artifact@v4
-        with:
-          name: code-coverage-full-html-report
-          path: artifacts/coverage/html/full
-          retention-days: 7
-          if-no-files-found: error
+          if [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" != "success" ]; then
+            exit 1
+          else
+            exit 0
+          fi
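
The additions above replace the per-OS job fan-out with the consolidated `test` and `test-packages` jobs and derive the pipeline's overall status from the aggregate conclusion reported by `im-open/workflow-conclusion@v2`. One benefit of funneling status through a single job like this is that branch protection can require one named check instead of enumerating every matrix job. A minimal standalone sketch of the gate pattern, using only the `workflow_conclusion` output that appears above (job and step ids are illustrative):

```yaml
# Illustrative gate job; the action and its workflow_conclusion output
# are exactly as used in the workflow above.
set-pipeline-exit-status:
  runs-on: ubuntu-22.04
  needs:
    - test
    - test-packages
  steps:
    - name: Get workflow information
      id: get-workflow-info
      uses: im-open/workflow-conclusion@v2

    - name: Set Pipeline Exit Status
      shell: bash
      run: |
        # Fail unless every job this gate depends on succeeded.
        [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" = "success" ] || exit 1
```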
diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
index 8b78622dbaa..4668cf0cc79 100644
--- a/.github/workflows/staging.yml
+++ b/.github/workflows/staging.yml
@@ -37,7 +37,7 @@ on:
 
 env:
   COLUMNS: 190
-  CACHE_SEED: SEED-2  # Bump the number to invalidate all caches
+  CACHE_SEED: SEED-1  # Bump the number to invalidate all caches
   RELENV_DATA: "${{ github.workspace }}/.relenv"
   PIP_DISABLE_PIP_VERSION_CHECK: "1"
   RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@@ -47,15 +47,15 @@ permissions:
   pull-requests: read  # for dorny/paths-filter to read pull requests
   actions: read  # for technote-space/workflow-conclusion-action to get the job statuses
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
-  cancel-in-progress: false
+#concurrency:
+#  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
+#  cancel-in-progress: false
 
 jobs:
 
   check-requirements:
     name: Check Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     environment: staging-check
     steps:
       - name: Check For Admin Permission
@@ -66,16 +66,12 @@ jobs:
 
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    environment: ci
     needs:
       - check-requirements
     outputs:
-      jobs: ${{ steps.define-jobs.outputs.jobs }}
-      runners: ${{ steps.runner-types.outputs.runners }}
       changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
-      os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
-      pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
-      testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
@@ -83,6 +79,11 @@ jobs:
       release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
       testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
+    env:
+      LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
+      FULL_TESTRUN_SLUGS: ${{ vars.FULL_TESTRUN_SLUGS }}
+      PR_TESTRUN_SLUGS: ${{ vars.PR_TESTRUN_SLUGS }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -158,6 +159,9 @@ jobs:
                 - pkg/**
                 - *pkg_requirements
                 - *salt_added_modified
+            nsis_tests:
+              - added|modified: &nsis_tests
+                - pkg/windows/nsis/**
             testrun:
               - added|modified:
                 - *pkg_requirements
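
The new `nsis_tests` filter makes the Windows NSIS installer sources a first-class trigger: it feeds the `nsis-tests` job added further down through the `changed-files` output. A hedged sketch of the consumption side, assuming the processed JSON exposes the filter name as a boolean key (the actual shape is produced by `tools ci process-changed-files`):

```yaml
# Hypothetical excerpt of nsis-tests.yml; the input name matches the
# `with:` key used by the nsis-tests job below, but the JSON key
# lookup is an assumption about the processed changed-files shape.
on:
  workflow_call:
    inputs:
      changed-files:
        type: string
        required: true

jobs:
  test-nsis:
    if: ${{ fromJSON(inputs.changed-files)['nsis_tests'] }}
    runs-on: windows-2022
    steps:
      - uses: actions/checkout@v4
```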
@@ -192,14 +196,6 @@ jobs:
           salt-version: "${{ inputs.salt-version }}"
           validate-version: true
 
-      - name: Get Pull Request Test Labels
-        id: get-pull-labels
-        if: ${{ github.event_name == 'pull_request'}}
-        env:
-            GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          tools ci get-pr-test-labels --repository ${{ github.repository }}
-
       - name: Get Hash For Nox Tarball Cache
         id: nox-archive-hash
         run: |
@@ -233,16 +229,6 @@ jobs:
         run: |
           echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
 
-      - name: Define Runner Types
-        id: runner-types
-        run: |
-          tools ci runner-types ${{ github.event_name }}
-
-      - name: Define Jobs To Run
-        id: define-jobs
-        run: |
-          tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ github.event_name }} changed-files.json
-
       - name: Get Salt Releases
         id: get-salt-releases
         env:
@@ -257,18 +243,18 @@ jobs:
         run: |
           tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
 
-      - name: Define Testrun
-        id: define-testrun
+      - name: Define workflow config
+        id: workflow-config
         run: |
-          tools ci define-testrun ${{ github.event_name }} changed-files.json
+          tools ci workflow-config${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
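
`tools ci workflow-config` folds the former `runner-types`, `define-jobs`, and `define-testrun` steps into a single JSON `config` output. Every downstream job then reads its gating flags and matrices out of that one blob, in the consumption pattern the jobs below all follow:

```yaml
# Pattern used throughout this file: gate on config['jobs'][...] and
# re-serialize nested objects with toJSON when passing them on. The
# job id and called workflow here are illustrative; the keys are the
# ones used below.
some-job:
  if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
  needs:
    - prepare-workflow
  uses: ./.github/workflows/some-action.yml  # hypothetical callee
  with:
    testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
```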
 
       - name: Check Contents of generated testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         run: |
           cat testrun-changed-files.txt || true
 
       - name: Upload testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         uses: actions/upload-artifact@v4
         with:
           name: testrun-changed-files.txt
@@ -282,7 +268,6 @@ jobs:
 
   pre-commit:
     name: Pre-Commit
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
     uses: ./.github/workflows/pre-commit-action.yml
     needs:
       - prepare-workflow
@@ -293,26 +278,35 @@ jobs:
 
   lint:
     name: Lint
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['lint'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
     uses: ./.github/workflows/lint-action.yml
     needs:
       - prepare-workflow
     with:
       changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
+  nsis-tests:
+    name: NSIS Tests
+    uses: ./.github/workflows/nsis-tests.yml
+    needs:
+      - prepare-workflow
+    with:
+      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
 
   prepare-release:
     name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
     runs-on:
-      - self-hosted
-      - linux
-      - medium
-      - x86_64
+      - ubuntu-22.04
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
     needs:
       - prepare-workflow
     steps:
       - uses: actions/checkout@v4
 
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+
       - name: Setup Python Tools Scripts
         uses: ./.github/actions/setup-python-tools-scripts
         with:
@@ -425,7 +419,7 @@ jobs:
 
   build-docs:
     name: Documentation
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-docs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
     needs:
       - prepare-workflow
       - build-source-tarball
@@ -436,11 +430,11 @@ jobs:
 
   build-source-tarball:
     name: Build Source Tarball
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-source-tarball'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
     needs:
       - prepare-workflow
       - prepare-release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -466,22 +460,22 @@ jobs:
           salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
 
   build-deps-onedir:
-    name: Build Dependencies Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    name: Build Onedir Dependencies
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
     needs:
       - prepare-workflow
     uses: ./.github/workflows/build-deps-onedir.yml
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-salt-onedir:
     name: Build Salt Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
     needs:
       - prepare-workflow
       - build-deps-onedir
@@ -490,14 +484,14 @@ jobs:
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   build-pkgs-onedir:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -505,17 +499,19 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
       source: "onedir"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
       environment: staging
-      sign-macos-packages: true
+      sign-macos-packages: false
       sign-windows-packages: ${{ inputs.sign-windows-packages }}
     secrets: inherit
 
   build-pkgs-src:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -523,16 +519,18 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-      relenv-version: "0.17.0"
-      python-version: "3.10.14"
+      relenv-version: "0.18.0"
+      python-version: "3.10.15"
       source: "src"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
       environment: staging
-      sign-macos-packages: true
+      sign-macos-packages: false
       sign-windows-packages: ${{ inputs.sign-windows-packages }}
     secrets: inherit
   build-ci-deps:
     name: CI Deps
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -542,2309 +540,59 @@ jobs:
       nox-version: 2022.8.7
       python-version: "3.10"
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
-
-  rockylinux-8-pkg-tests:
-    name: Rocky Linux 8 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
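
`build-ci-deps` and the build jobs above now receive their fan-out as a serialized `matrix` input plus a `linux_arm_runner` override, instead of one hand-written job per OS. A minimal sketch of how a called workflow can expand such an input, assuming the matrix entries carry an `arch` field as the removed per-OS jobs did (that key, and the fallback runner label, are assumptions):

```yaml
# Sketch of the callee side; input names match the `with:` keys above.
on:
  workflow_call:
    inputs:
      matrix:
        type: string
        required: true
      linux_arm_runner:
        type: string
        required: true

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJSON(inputs.matrix) }}
    # Route arm64 legs to the configured runner, everything else to a
    # hosted runner (fallback label is illustrative).
    runs-on: ${{ matrix.arch == 'arm64' && inputs.linux_arm_runner || 'ubuntu-22.04' }}
```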
+  test-packages:
+    name: Test Packages
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
     needs:
       - prepare-workflow
       - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-packages-action.yml
     with:
-      distro-slug: rockylinux-8
       nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       skip-code-coverage: true
       testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-8-arm64-pkg-tests:
-    name: Rocky Linux 8 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
+  test:
+    name: Test Salt
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
     needs:
       - prepare-workflow
-      - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-action.yml
     with:
-      distro-slug: rockylinux-8-arm64
       nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
       nox-version: 2022.8.7
       python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-pkg-tests:
-    name: Rocky Linux 9 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
+      testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  rockylinux-9-arm64-pkg-tests:
-    name: Rocky Linux 9 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-pkg-tests:
-    name: Amazon Linux 2 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2-arm64-pkg-tests:
-    name: Amazon Linux 2 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-pkg-tests:
-    name: Amazon Linux 2023 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  amazonlinux-2023-arm64-pkg-tests:
-    name: Amazon Linux 2023 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-pkg-tests:
-    name: Debian 11 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-11-arm64-pkg-tests:
-    name: Debian 11 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-pkg-tests:
-    name: Debian 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  debian-12-arm64-pkg-tests:
-    name: Debian 12 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests:
-    name: Photon OS 4 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-arm64-pkg-tests:
-    name: Photon OS 4 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-4-pkg-tests-fips:
-    name: Photon OS 4 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-4-arm64-pkg-tests-fips:
-    name: Photon OS 4 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-pkg-tests:
-    name: Photon OS 5 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-arm64-pkg-tests:
-    name: Photon OS 5 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  photonos-5-pkg-tests-fips:
-    name: Photon OS 5 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  photonos-5-arm64-pkg-tests-fips:
-    name: Photon OS 5 Arm64 Package Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: rpm
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-      fips: true
-
-  ubuntu-2004-pkg-tests:
-    name: Ubuntu 20.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2004-arm64-pkg-tests:
-    name: Ubuntu 20.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-pkg-tests:
-    name: Ubuntu 22.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2204-arm64-pkg-tests:
-    name: Ubuntu 22.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-pkg-tests:
-    name: Ubuntu 24.04 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  ubuntu-2404-arm64-pkg-tests:
-    name: Ubuntu 24.04 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: deb
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-12-pkg-tests:
-    name: macOS 12 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-pkg-tests:
-    name: macOS 13 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  macos-13-arm64-pkg-tests:
-    name: macOS 13 Arm64 Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-nsis-pkg-tests:
-    name: Windows 2016 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016-msi-pkg-tests:
-    name: Windows 2016 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-nsis-pkg-tests:
-    name: Windows 2019 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2019-msi-pkg-tests:
-    name: Windows 2019 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-nsis-pkg-tests:
-    name: Windows 2022 NSIS Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: NSIS
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2022-msi-pkg-tests:
-    name: Windows 2022 MSI Package Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: MSI
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  windows-2016:
-    name: Windows 2016 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2016
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
       skip-code-coverage: true
       workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2019:
-    name: Windows 2019 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2019
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  windows-2022:
-    name: Windows 2022 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
-    with:
-      distro-slug: windows-2022
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 210 || 360 }}
-
-  macos-12:
-    name: macOS 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-12
-      runner: macos-12
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13:
-    name: macOS 13 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13
-      runner: macos-13
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  macos-13-arm64:
-    name: macOS 13 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: macos-13-arm64
-      runner: macos-13-xlarge
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8:
-    name: Rocky Linux 8 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-8-arm64:
-    name: Rocky Linux 8 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-8-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9:
-    name: Rocky Linux 9 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  rockylinux-9-arm64:
-    name: Rocky Linux 9 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: rockylinux-9-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2:
-    name: Amazon Linux 2 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2-arm64:
-    name: Amazon Linux 2 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023:
-    name: Amazon Linux 2023 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  amazonlinux-2023-arm64:
-    name: Amazon Linux 2023 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: amazonlinux-2023-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  archlinux-lts:
-    name: Arch Linux LTS Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: archlinux-lts
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11:
-    name: Debian 11 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-11-arm64:
-    name: Debian 11 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-11-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12:
-    name: Debian 12 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  debian-12-arm64:
-    name: Debian 12 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: debian-12-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  fedora-40:
-    name: Fedora 40 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: fedora-40
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  opensuse-15:
-    name: Opensuse 15 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: opensuse-15
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4:
-    name: Photon OS 4 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-arm64:
-    name: Photon OS 4 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-4-fips:
-    name: Photon OS 4 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-4-arm64-fips:
-    name: Photon OS 4 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-4-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5:
-    name: Photon OS 5 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-arm64:
-    name: Photon OS 5 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  photonos-5-fips:
-    name: Photon OS 5 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  photonos-5-arm64-fips:
-    name: Photon OS 5 Arm64 Test (fips)
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: photonos-5-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-      fips: true
-
-  ubuntu-2004:
-    name: Ubuntu 20.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2004-arm64:
-    name: Ubuntu 20.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-20.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204:
-    name: Ubuntu 22.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2204-arm64:
-    name: Ubuntu 22.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-22.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404:
-    name: Ubuntu 24.04 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: x86_64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
-  ubuntu-2404-arm64:
-    name: Ubuntu 24.04 Arm64 Test
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: ubuntu-24.04-arm64
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: arm64
-      nox-version: 2022.8.7
-      gh-actions-python-version: "3.10"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      skip-code-coverage: true
-      workflow-slug: staging
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
-
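Every test job above selects `timeout-minutes` with the same `fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360` expression. GitHub Actions expressions have no ternary operator, so `cond && a || b` is the conventional stand-in; it only behaves like a ternary because the "true" branch (`180`) is itself truthy, otherwise the `|| b` arm would always win. A minimal, self-contained sketch of the idiom, with a hypothetical condition:

    name: ternary-idiom-sketch
    on: workflow_dispatch
    jobs:
      demo:
        runs-on: ubuntu-22.04
        # Emulates `cond ? 30 : 60`; safe only because 30 is truthy.
        timeout-minutes: ${{ github.event_name == 'workflow_dispatch' && 30 || 60 }}
        steps:
          - run: echo "timeout resolved via the && || idiom"

Note the mapping in the jobs above gives a `full` test run the shorter 180-minute budget and everything else 360.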
-  build-src-repo:
-    name: Build Repository
-    environment: staging
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-source-tarball
-      - build-pkgs-src
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - src
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
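The `Get Salt Project GitHub Actions Bot Environment` step, repeated in every repo job below, reads an EC2 instance tag through IMDSv2, which rejects plain GETs: a short-lived session token must first be requested with a PUT and then presented on each metadata request. It also assumes the runner instance exposes its tags in instance metadata, or the second `curl` will fail. The handshake as an illustrative step fragment:

      - name: Read an instance tag via IMDSv2 (illustrative fragment)
        run: |
          # 1. Obtain a session token valid for 30 seconds.
          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" \
            -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
          # 2. Present the token to read a tag from the metadata service.
          curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" \
            "http://169.254.169.254/latest/meta-data/tags/instance/spb:environment"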
-      - name: Download Source Tarball
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
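The `Setup GnuPG` heredoc builds a throwaway keyring: `GNUPGHOME` points at a fresh `mktemp` directory under the mode-0700, user-owned `/run/gpg`, and the `batch`, `no-tty`, and `pinentry-mode loopback` options let gpg read its passphrase from a file instead of prompting, which is what keeps the later signing steps non-interactive. The same pattern stripped to a fragment (the comments are explanatory and kept out of `gpg.conf` itself):

      - name: Ephemeral, non-interactive GnuPG home (illustrative fragment)
        run: |
          GNUPGHOME="$(mktemp -d)"   # throwaway keyring, discarded with the runner
          export GNUPGHOME
          # batch: never prompt; no-tty: no terminal exists in CI;
          # pinentry-mode loopback: accept a passphrase from a file/descriptor.
          cat <<EOF > "${GNUPGHOME}/gpg.conf"
          batch
          no-tty
          pinentry-mode loopback
          EOF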
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
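`Get Secrets` is a small decryption pipeline: the Secrets Manager entry holds base64-encoded, GPG-encrypted blobs; `jq` selects a field, `base64 -d` unwraps it, and one `gpg` pass decrypts it with the workflow's `SECRETS_KEY`, after which the result is either imported (the signing key) or written to `${GNUPGHOME}/passphrase`. Appending `passphrase-file ...` to `gpg.conf` is what lets every later `--key-id=64CBBC8173D76B3F` operation sign unattended. The shape of one leg, with a placeholder secret id and field name:

      - name: Decrypt and import a wrapped key (illustrative fragment)
        env:
          WRAP_PASSPHRASE: ${{ secrets.SECRETS_KEY }}
        run: |
          PASSFILE=$(mktemp)
          echo "$WRAP_PASSPHRASE" > "$PASSFILE"
          aws secretsmanager get-secret-value --secret-id "<some-secret-id>" \
            --query SecretString --output text \
            | jq -r .some_field | base64 -d \
            | gpg --passphrase-file "$PASSFILE" -d - \
            | gpg --import -
          rm "$PASSFILE"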
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create src --key-id=64CBBC8173D76B3F  \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Copy Files For Source Only Artifact Uploads
-        run: |
-          mkdir artifacts/src
-          find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
-
-      - name: Upload Standalone Repository As An Artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo
-          path: |
-            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
-            artifacts/src/*-GPG-*
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-src
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: src-repo
-
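This job uploads through two different mechanisms: stock `actions/upload-artifact@v4` for the standalone source artifact, and the repository-local `./.github/actions/upload-artifact` composite, which additionally takes an `archive-name`. Combined with the `Decompress Repository Artifacts` step in `publish-repositories` below, the local action evidently tars its `path` before uploading (plain artifact uploads do not preserve file permissions, and a single tarball also keeps the file count down). The composite itself is not shown in this diff, so the following is only a sketch of that assumed behavior, with illustrative input wiring:

    # Sketch of a tar-then-upload composite (assumed core of
    # ./.github/actions/upload-artifact; inputs are illustrative).
    runs:
      using: composite
      steps:
        - shell: bash
          run: tar -czvf "${{ inputs.archive-name }}.tar.gz" ${{ inputs.path }}
        - uses: actions/upload-artifact@v4
          with:
            name: ${{ inputs.name }}
            path: ${{ inputs.archive-name }}.tar.gz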
-  build-deb-repo:
-    name: Build Repository
-    environment: staging
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - pkg-type: deb
-            distro: debian
-            version: "11"
-            arch: x86_64
-          - pkg-type: deb
-            distro: debian
-            version: "11"
-            arch: arm64
-          - pkg-type: deb
-            distro: debian
-            version: "12"
-            arch: x86_64
-          - pkg-type: deb
-            distro: debian
-            version: "12"
-            arch: arm64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "20.04"
-            arch: x86_64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "20.04"
-            arch: arm64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "22.04"
-            arch: x86_64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "22.04"
-            arch: arm64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "24.04"
-            arch: x86_64
-          - pkg-type: deb
-            distro: ubuntu
-            version: "24.04"
-            arch: arm64
-
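The matrix enumerates every distro/version/arch pair explicitly via `include`, which keeps irregular combinations expressible but is verbose. When the full cross-product is genuinely wanted, as it is for this deb list, the same ten jobs collapse to two axes; a sketch assuming every pairing stays valid:

    strategy:
      fail-fast: false
      matrix:
        arch: [x86_64, arm64]
        target:
          - {distro: debian, version: "11"}
          - {distro: debian, version: "12"}
          - {distro: ubuntu, version: "20.04"}
          - {distro: ubuntu, version: "22.04"}
          - {distro: ubuntu, version: "24.04"}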
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Download System Dependencies
-        run: |
-          sudo apt update
-          sudo apt install -y devscripts apt-utils
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download DEB Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-deb
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }}  \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-${{ matrix.pkg-type }}-${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
-
-  build-rpm-repo:
-    name: Build Repository
-    environment: staging
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - pkg-type: rpm
-            distro: amazon
-            version: "2"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2"
-            arch: arm64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2023"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2023"
-            arch: arm64
-          - pkg-type: rpm
-            distro: amazon
-            version: "2023"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: fedora
-            version: "40"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: fedora
-            version: "40"
-            arch: arm64
-          - pkg-type: rpm
-            distro: fedora
-            version: "40"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: photon
-            version: "4"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: photon
-            version: "4"
-            arch: arm64
-          - pkg-type: rpm
-            distro: photon
-            version: "4"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: photon
-            version: "5"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: photon
-            version: "5"
-            arch: arm64
-          - pkg-type: rpm
-            distro: photon
-            version: "5"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: redhat
-            version: "8"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: redhat
-            version: "8"
-            arch: arm64
-          - pkg-type: rpm
-            distro: redhat
-            version: "8"
-            arch: aarch64
-          - pkg-type: rpm
-            distro: redhat
-            version: "9"
-            arch: x86_64
-          - pkg-type: rpm
-            distro: redhat
-            version: "9"
-            arch: arm64
-          - pkg-type: rpm
-            distro: redhat
-            version: "9"
-            arch: aarch64
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Download System Dependencies
-        run: |
-          sudo apt update
-          sudo apt install -y rpm
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download RPM Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}-rpm
-          path: artifacts/pkgs/incoming
-
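The RPM matrix carries both `arm64` and `aarch64` rows for each distro, but the build artifacts are only published under the `arm64` name, so the download step normalizes with `matrix.arch == 'aarch64' && 'arm64' || matrix.arch`; the `aarch64` spelling still flows into `--distro-arch` below so the generated repo matches the naming RPM-based distros expect. The normalization in isolation (the version string is hypothetical):

      - name: Download RPM Packages (arch-normalized, illustrative)
        uses: actions/download-artifact@v4
        with:
          # Artifacts use `arm64`; repo metadata keeps the distro's `aarch64`.
          name: salt-3007.0-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}-rpm
          path: artifacts/pkgs/incoming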
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        env:
-          SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
-          SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
-          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
-          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
-        run: |
-          tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }}  \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-${{ matrix.pkg-type }}-${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
-
-  build-windows-repo:
-    name: Build Repository
-    environment: staging
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - windows
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download Windows NSIS x86 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86-NSIS
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows MSI x86 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86-MSI
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows NSIS amd64 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-amd64-NSIS
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows MSI amd64 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-amd64-MSI
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create windows --key-id=64CBBC8173D76B3F  \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-windows
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: windows-repo
-
-  build-macos-repo:
-    name: Build Repository
-    environment: staging
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - macos
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download macOS x86_64 Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86_64-macos
-          path: artifacts/pkgs/incoming
-
-      - name: Download macOS Arch64 Packages
-        if: ${{ ! github.event.repository.fork }}
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-arm64-macos
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create macos --key-id=64CBBC8173D76B3F  \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-macos
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: macos-repo
-
-  build-onedir-repo:
-    name: Build Repository
-    environment: staging
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      - build-salt-onedir
-    strategy:
-      fail-fast: false
-      matrix:
-        pkg-type:
-          - onedir
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Download Linux x86_64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-linux-x86_64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Linux arm64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-linux-arm64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download macOS x86_64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-x86_64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download macOS arm64 Onedir Archive
-        if: ${{ ! github.event.repository.fork }}
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-arm64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows amd64 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-amd64.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows amd64 Onedir Archive(zip)
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-amd64.zip
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows x86 Onedir Archive
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-x86.tar.xz
-          path: artifacts/pkgs/incoming
-
-      - name: Download Windows amd64 Onedir Archive(zip)
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-windows-x86.zip
-          path: artifacts/pkgs/incoming
-
-      - name: Setup GnuPG
-        run: |
-          sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
-          GNUPGHOME="$(mktemp -d -p /run/gpg)"
-          echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
-          cat <<EOF > "${GNUPGHOME}/gpg.conf"
-          batch
-          no-tty
-          pinentry-mode loopback
-          EOF
-
-      - name: Get Secrets
-        env:
-          SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
-        run: |
-          SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
-          echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text | jq .default_key -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
-            | gpg --import -
-          sync
-          aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
-            --query SecretString --output text| jq .default_passphrase -r | base64 -d \
-            | gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
-          sync
-          rm "$SECRETS_KEY_FILE"
-          echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
-
-      - name: Create Repository Path
-        run: |
-          mkdir -p artifacts/pkgs/repo
-
-      - name: Create Repository
-        run: |
-          tools pkg repo create onedir --key-id=64CBBC8173D76B3F  \
-            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
-            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
-
-      - name: Upload Repository As An Artifact
-        uses: ./.github/actions/upload-artifact
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-onedir
-          path: artifacts/pkgs/repo/*
-          retention-days: 7
-          if-no-files-found: error
-          archive-name: onedir-repo
-
-  publish-repositories:
-    name: Publish Repositories
-    if: ${{ always() && ! failure() && ! cancelled() }}
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
-    environment: staging
-    needs:
-      - prepare-workflow
-      - build-docs
-      - build-src-repo
-      - build-deb-repo
-      - build-rpm-repo
-      - build-windows-repo
-      - build-macos-repo
-      - build-onedir-repo
-
-    steps:
-
-      - uses: actions/checkout@v4
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Download Repository Artifact
-        uses: actions/download-artifact@v4
-        with:
-          pattern: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-repo-*
-          merge-multiple: true
-          path: repo/
-
-      - name: Decompress Repository Artifacts
-        run: |
-          find repo/ -type f -name '*.tar.gz' -print -exec tar xvf {} \;
-          find repo/ -type f -name '*.tar.gz' -print -exec rm -f {} \;
-
-      - name: Show Repository
-        run: |
-          tree -a artifacts/pkgs/repo/
-
-      - name: Upload Repository Contents (staging)
-        env:
-          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
-          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
-        run: |
-          tools pkg repo publish staging --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/
+      default-timeout: 180
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
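The replacement `with:` block shows the round-trip this refactor relies on throughout: `prepare-workflow` publishes one JSON-encoded string output (`config`), `fromJSON` parses it back into an object so a sub-key such as `test-matrix` can be indexed, and `toJSON` re-serializes the sub-object because inputs to a reusable workflow must be scalars. A minimal sketch of the pattern (job names and keys are hypothetical):

    jobs:
      emit:
        runs-on: ubuntu-22.04
        outputs:
          config: ${{ steps.cfg.outputs.config }}
        steps:
          - id: cfg
            run: echo 'config={"test-matrix":[{"slug":"debian-12"}]}' >> "$GITHUB_OUTPUT"
      consume:
        needs: emit
        uses: ./.github/workflows/test-action-linux.yml
        with:
          # parse -> index -> re-serialize; workflow inputs are strings
          matrix: ${{ toJSON(fromJSON(needs.emit.outputs.config)['test-matrix']) }}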
 
   upload-release-artifacts:
     name: Upload Release Artifacts
     needs:
       - prepare-workflow
       - build-docs
-      - build-src-repo
     environment: staging
     runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
+      - ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Setup Python Tools Scripts
         uses: ./.github/actions/setup-python-tools-scripts
         with:
@@ -2856,62 +604,16 @@ jobs:
           name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
           path: artifacts/release
 
-      - name: Download Source Repository
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo
-          path: artifacts/release
-
       - name: Download Release Documentation (HTML)
         uses: actions/download-artifact@v4
         with:
           name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-docs-html.tar.xz
           path: artifacts/release
 
-      - name: Download Release Documentation (ePub)
-        uses: actions/download-artifact@v4
-        with:
-          name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub
-          path: artifacts/release
-
       - name: Show Release Artifacts
         run: |
           tree -a artifacts/release
 
-      - name: Upload Release Artifacts
-        run: |
-          tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
-
-      - name: Upload PyPi Artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: pypi-artifacts
-          path: |
-            artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-            artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
-          retention-days: 7
-          if-no-files-found: error
-
-  pkg-download-tests:
-    name: Package Downloads
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    needs:
-      - prepare-workflow
-      - publish-repositories
-      - build-ci-deps
-      - build-salt-onedir
-    uses: ./.github/workflows/test-package-downloads-action.yml
-    with:
-      nox-session: ci-test-onedir
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      environment: staging
-      nox-version: 2022.8.7
-      python-version: "3.10"
-      skip-code-coverage: true
-      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
-    secrets: inherit
-
   publish-pypi:
     name: Publish to PyPi(test)
     if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }}
@@ -2919,82 +621,9 @@ jobs:
       - prepare-workflow
       - upload-release-artifacts
       - build-ci-deps
-      - windows-2016
-      - windows-2019
-      - windows-2022
-      - macos-12
-      - macos-13
-      - macos-13-arm64
-      - rockylinux-8
-      - rockylinux-8-arm64
-      - rockylinux-9
-      - rockylinux-9-arm64
-      - amazonlinux-2
-      - amazonlinux-2-arm64
-      - amazonlinux-2023
-      - amazonlinux-2023-arm64
-      - archlinux-lts
-      - debian-11
-      - debian-11-arm64
-      - debian-12
-      - debian-12-arm64
-      - fedora-40
-      - opensuse-15
-      - photonos-4
-      - photonos-4-arm64
-      - photonos-4-fips
-      - photonos-4-arm64-fips
-      - photonos-5
-      - photonos-5-arm64
-      - photonos-5-fips
-      - photonos-5-arm64-fips
-      - ubuntu-2004
-      - ubuntu-2004-arm64
-      - ubuntu-2204
-      - ubuntu-2204-arm64
-      - ubuntu-2404
-      - ubuntu-2404-arm64
-      - rockylinux-8-pkg-tests
-      - rockylinux-8-arm64-pkg-tests
-      - rockylinux-9-pkg-tests
-      - rockylinux-9-arm64-pkg-tests
-      - amazonlinux-2-pkg-tests
-      - amazonlinux-2-arm64-pkg-tests
-      - amazonlinux-2023-pkg-tests
-      - amazonlinux-2023-arm64-pkg-tests
-      - debian-11-pkg-tests
-      - debian-11-arm64-pkg-tests
-      - debian-12-pkg-tests
-      - debian-12-arm64-pkg-tests
-      - photonos-4-pkg-tests
-      - photonos-4-arm64-pkg-tests
-      - photonos-4-pkg-tests-fips
-      - photonos-4-arm64-pkg-tests-fips
-      - photonos-5-pkg-tests
-      - photonos-5-arm64-pkg-tests
-      - photonos-5-pkg-tests-fips
-      - photonos-5-arm64-pkg-tests-fips
-      - ubuntu-2004-pkg-tests
-      - ubuntu-2004-arm64-pkg-tests
-      - ubuntu-2204-pkg-tests
-      - ubuntu-2204-arm64-pkg-tests
-      - ubuntu-2404-pkg-tests
-      - ubuntu-2404-arm64-pkg-tests
-      - macos-12-pkg-tests
-      - macos-13-pkg-tests
-      - macos-13-arm64-pkg-tests
-      - windows-2016-nsis-pkg-tests
-      - windows-2016-msi-pkg-tests
-      - windows-2019-nsis-pkg-tests
-      - windows-2019-msi-pkg-tests
-      - windows-2022-nsis-pkg-tests
-      - windows-2022-msi-pkg-tests
-      - pkg-download-tests
     environment: staging
     runs-on:
-      - self-hosted
-      - linux
-      - repo-staging
+      - ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -3038,3 +667,60 @@ jobs:
           TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}"
         run: |
           tools pkg pypi-upload --test artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
+
+  draft-release:
+    name: Draft Github Release
+    if: |
+      always() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
+      (needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') &&
+      needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' &&
+      needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success'
+    needs:
+      - prepare-workflow
+      - pre-commit
+      - build-salt-onedir
+      - build-pkgs-onedir
+      - test-packages
+      - test
+    permissions:
+      contents: write
+      pull-requests: read
+      id-token: write
+    uses: ./.github/workflows/draft-release.yml
+    with:
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['artifact-matrix']) }}
+      build-matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
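The `draft-release` gate pairs `always()` with explicit `needs.<job>.result` checks. Under the default `success()` condition a job is skipped whenever any dependency did not succeed, and `test` or `test-packages` can legitimately be skipped here; `always()` forces the expression to be evaluated regardless, and the result checks then spell out which mix of `success` and `skipped` is acceptable. The shape in isolation, with hypothetical job names:

    release-gate:
      if: |
        always() && needs.required-job.result == 'success' &&
        (needs.optional-job.result == 'success' || needs.optional-job.result == 'skipped')
      needs:
        - required-job
        - optional-job
      runs-on: ubuntu-22.04
      steps:
        - run: echo "runs even when optional-job was skipped"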
+  set-pipeline-exit-status:
+    # This job exists so GitHub branch protection can require a single check
+    # to pass on a pull request, instead of requiring every job individually.
+    name: Set the ${{ github.workflow }} Pipeline Exit Status
+    if: always()
+    runs-on: ubuntu-22.04
+    needs:
+      - check-requirements
+      - prepare-workflow
+      - pre-commit
+      - lint
+      - nsis-tests
+      - build-docs
+      - build-deps-onedir
+      - build-salt-onedir
+      - build-pkgs-src
+      - upload-release-artifacts
+      - publish-pypi
+      - test-packages
+      - test
+    steps:
+      - name: Get workflow information
+        id: get-workflow-info
+        uses: im-open/workflow-conclusion@v2
+
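+      # workflow-conclusion queries this run's jobs via the GitHub API and
+      # exposes their aggregated result as the `workflow_conclusion` output.
+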
+      - name: Set Pipeline Exit Status
+        shell: bash
+        run: |
+          if [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" != "success" ]; then
+            exit 1
+          else
+            exit 0
+          fi
diff --git a/.github/workflows/templates/build-ci-deps.yml.jinja b/.github/workflows/templates/build-ci-deps.yml.jinja
index 59f2bf0b9bc..e7856114100 100644
--- a/.github/workflows/templates/build-ci-deps.yml.jinja
+++ b/.github/workflows/templates/build-ci-deps.yml.jinja
@@ -1,9 +1,10 @@
 
   build-ci-deps:
     <%- do test_salt_needs.append("build-ci-deps") %>
+    <%- do test_salt_linux_needs.append("build-ci-deps") %>
     name: CI Deps
     <%- if workflow_slug != 'release' %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
     <%- endif %>
     needs:
       - prepare-workflow
@@ -20,3 +21,5 @@
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
       nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
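+{#- linux_arm_runner threads the LINUX_ARM_RUNNER repository variable through
+    prepare-workflow's config, presumably as the runner label for aarch64
+    jobs. -#}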
diff --git a/.github/workflows/templates/build-deb-repo.yml.jinja b/.github/workflows/templates/build-deb-repo.yml.jinja
index 91f8348385c..0772375c76e 100644
--- a/.github/workflows/templates/build-deb-repo.yml.jinja
+++ b/.github/workflows/templates/build-deb-repo.yml.jinja
@@ -23,12 +23,6 @@
         with:
           cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Download DEB Packages
         uses: actions/download-artifact@v4
         with:
diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja
index 835e366bf52..6753d64133b 100644
--- a/.github/workflows/templates/build-macos-repo.yml.jinja
+++ b/.github/workflows/templates/build-macos-repo.yml.jinja
@@ -13,12 +13,6 @@
         with:
           cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Download macOS x86_64 Packages
         uses: actions/download-artifact@v4
         with:
diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja
index c6b51f07166..bb7db53905e 100644
--- a/.github/workflows/templates/build-onedir-repo.yml.jinja
+++ b/.github/workflows/templates/build-onedir-repo.yml.jinja
@@ -13,12 +13,6 @@
         with:
           cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Download Linux x86_64 Onedir Archive
         uses: actions/download-artifact@v4
         with:
diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja
index 745bcc3c9ca..cf32337d1d0 100644
--- a/.github/workflows/templates/build-packages.yml.jinja
+++ b/.github/workflows/templates/build-packages.yml.jinja
@@ -1,4 +1,9 @@
-  <%- for backend in ("onedir", "src") %>
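+{#- Plain CI builds only onedir packages; nightly, staging and release
+    environments also build source packages. -#}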
+<%- if gh_environment != "ci" -%>
+<%- set pkg_types = ("onedir", "src") %>
+<%- else -%>
+<%- set pkg_types = ("onedir",) %>
+<%- endif -%>
+  <%- for backend in pkg_types %>
   <%- set job_name = "build-pkgs-{}".format(backend) %>
   <%- if backend == "src" %>
     <%- do conclusion_needs.append(job_name) %>
@@ -6,7 +11,7 @@
 
   <{ job_name }>:
     name: Build Packages
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
     needs:
       - prepare-workflow
       - build-salt-onedir
@@ -17,11 +22,14 @@
       relenv-version: "<{ relenv_version }>"
       python-version: "<{ python_version }>"
       source: "<{ backend }>"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
     <%- if gh_environment != "ci" %>
       environment: <{ gh_environment }>
-      sign-macos-packages: true
+      sign-macos-packages: false
       sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>
     secrets: inherit
+
     <%- endif %>
 
   <%- endfor %>
diff --git a/.github/workflows/templates/build-repos.yml.jinja b/.github/workflows/templates/build-repos.yml.jinja
deleted file mode 100644
index 92f621c857e..00000000000
--- a/.github/workflows/templates/build-repos.yml.jinja
+++ /dev/null
@@ -1,35 +0,0 @@
-<%- for type, display_name in (
-                                ("src", "Source"),
-                                ("deb", "DEB"),
-                                ("rpm", "RPM"),
-                                ("windows", "Windows"),
-                                ("macos", "macOS"),
-                                ("onedir", "Onedir"),
-                              ) %>
-
-  <%- set job_name = "build-{}-repo".format(type) %>
-    <%- do build_repo_needs.append(job_name) %>
-
-  <{ job_name }>:
-    name: Build Repository
-    environment: <{ gh_environment }>
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
-    env:
-      USE_S3_CACHE: 'true'
-    needs:
-      - prepare-workflow
-      <%- if type not in ("src", "onedir") %>
-      - build-pkgs-onedir
-      <%- elif type == 'onedir' %>
-      - build-salt-onedir
-      <%- elif type == 'src' %>
-      - build-source-tarball
-      - build-pkgs-src
-      <%- endif %>
-
-    <%- include "build-{}-repo.yml.jinja".format(type) %>
-
-<%- endfor %>
diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja
index 7039043d4bf..836382bac42 100644
--- a/.github/workflows/templates/build-rpm-repo.yml.jinja
+++ b/.github/workflows/templates/build-rpm-repo.yml.jinja
@@ -23,12 +23,6 @@
         with:
           cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Download RPM Packages
         uses: actions/download-artifact@v4
         with:
diff --git a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja
index 8409f05879b..9127ba7aae6 100644
--- a/.github/workflows/templates/build-src-repo.yml.jinja
+++ b/.github/workflows/templates/build-src-repo.yml.jinja
@@ -13,12 +13,6 @@
         with:
           cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Download Source Tarball
         uses: actions/download-artifact@v4
         with:
diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja
index a259e6dd43d..208d74f63c1 100644
--- a/.github/workflows/templates/ci.yml.jinja
+++ b/.github/workflows/templates/ci.yml.jinja
@@ -12,7 +12,6 @@
   <{ job_name }>:
     <%- do conclusion_needs.append(job_name) %>
     name: Pre-Commit
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
     uses: ./.github/workflows/pre-commit-action.yml
     needs:
       - prepare-workflow
@@ -30,7 +29,7 @@
   lint:
     <%- do conclusion_needs.append('lint') %>
     name: Lint
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
     uses: ./.github/workflows/lint-action.yml
     needs:
       - prepare-workflow
@@ -39,37 +38,37 @@
 
   <%- endif %>
 
+  <%- set job_name = "nsis-tests" %>
+  <%- if includes.get(job_name, True) %>
+  <{ job_name }>:
+    <%- do conclusion_needs.append(job_name) %>
+    name: NSIS Tests
+    uses: ./.github/workflows/nsis-tests.yml
+    needs:
+      - prepare-workflow
+    with:
+      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
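+      # changed-files is passed so the reusable workflow can gate on
+      # NSIS-related changes (cf. the nsis_tests filter in layout.yml.jinja).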
+
+  <%- endif %>
 
   <%- set job_name = "prepare-release" %>
   <%- if includes.get(job_name, True) %>
 
   <{ job_name }>:
     name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
-    <%- if prepare_actual_release %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
     runs-on:
-      - self-hosted
-      - linux
-      - medium
-      - x86_64
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    runs-on: ubuntu-latest
-    <%- endif %>
+      - ubuntu-22.04
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
     needs:
       - prepare-workflow
     steps:
       - uses: actions/checkout@v4
 
-    <%- if not prepare_actual_release %>
-
       - name: Set up Python 3.10
         uses: actions/setup-python@v5
         with:
           python-version: "3.10"
 
-    <%- endif %>
-
       - name: Setup Python Tools Scripts
         uses: ./.github/actions/setup-python-tools-scripts
         with:
@@ -191,7 +190,7 @@
   <{ job_name }>:
     <%- do conclusion_needs.append(job_name) %>
     name: Documentation
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
     needs:
       - prepare-workflow
       - build-source-tarball
@@ -208,11 +207,11 @@
 
   <{ job_name }>:
     name: Build Source Tarball
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
     needs:
       - prepare-workflow
       - prepare-release
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -245,29 +244,28 @@
 
   <{ job_name }>:
     <%- do conclusion_needs.append(job_name) %>
-    name: Build Dependencies Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    name: Build Onedir Dependencies
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
     needs:
       - prepare-workflow
     uses: ./.github/workflows/build-deps-onedir.yml
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
       relenv-version: "<{ relenv_version }>"
       python-version: "<{ python_version }>"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   <%- endif %>
 
-
   <%- set job_name = "build-salt-onedir" %>
   <%- if includes.get(job_name, True) %>
 
   <{ job_name }>:
     <%- do conclusion_needs.append(job_name) %>
     name: Build Salt Onedir
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
     needs:
       - prepare-workflow
       - build-deps-onedir
@@ -276,14 +274,13 @@
     with:
       cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
       relenv-version: "<{ relenv_version }>"
       python-version: "<{ python_version }>"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
 
   <%- endif %>
 
-
   <%- set job_name = "build-pkgs" %>
   <%- if includes.get(job_name, True) %>
   <%- include "build-packages.yml.jinja" %>
@@ -309,8 +306,8 @@
   combine-all-code-coverage:
     <%- do conclusion_needs.append("combine-all-code-coverage") %>
     name: Combine Code Coverage
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
-    runs-on: ubuntu-latest
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
+    runs-on: ubuntu-22.04
     env:
       PIP_INDEX_URL: https://pypi.org/simple
     needs:
@@ -362,8 +359,9 @@
         id: get-coverage-reports
         uses: actions/download-artifact@v4
         with:
-          name: all-testrun-coverage-artifacts
           path: artifacts/coverage/
+          pattern: all-testrun-coverage-artifacts*
+          merge-multiple: true
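+          # With download-artifact@v4, `pattern` plus `merge-multiple`
+          # downloads every matching artifact into one directory instead of
+          # per-artifact subdirectories.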
 
       - name: Display structure of downloaded files
         run: tree -a artifacts/
@@ -416,6 +414,7 @@
           path: artifacts/coverage/html/salt
           retention-days: 7
           if-no-files-found: error
+          include-hidden-files: true
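+          # upload-artifact@v4 excludes hidden files by default; the coverage
+          # output can include dotfiles, so opt back in.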
 
       - name: Report Combined Code Coverage
         run: |
@@ -432,6 +431,7 @@
           path: artifacts/coverage/coverage.json
           retention-days: 7
           if-no-files-found: error
+          include-hidden-files: true
 
       - name: Create Combined Code Coverage HTML Report
         run: |
@@ -444,6 +444,7 @@
           path: artifacts/coverage/html/full
           retention-days: 7
           if-no-files-found: error
+          include-hidden-files: true
   <%- endif %>
 
 <%- endblock jobs %>
diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja
index 69fd55b42de..fa69221b468 100644
--- a/.github/workflows/templates/layout.yml.jinja
+++ b/.github/workflows/templates/layout.yml.jinja
@@ -5,7 +5,7 @@
 <%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %>
 <%- set prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %>
 <%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %>
-<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}") %>
+<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}") %>
 <%- set gpg_key_id = "64CBBC8173D76B3F" %>
 <%- set prepare_actual_release = prepare_actual_release | default(False) %>
 <%- set gh_actions_workflows_python_version = "3.10" %>
@@ -34,7 +34,7 @@ on:
 
 env:
   COLUMNS: 190
-  CACHE_SEED: SEED-2  # Bump the number to invalidate all caches
+  CACHE_SEED: SEED-1  # Bump the number to invalidate all caches
   RELENV_DATA: "${{ github.workspace }}/.relenv"
   PIP_DISABLE_PIP_VERSION_CHECK: "1"
   RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@@ -78,7 +78,8 @@ jobs:
 
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    environment: ci
     <%- if prepare_workflow_if_check %>
     if: <{ prepare_workflow_if_check }>
     <%- endif %>
@@ -89,12 +90,7 @@ jobs:
       <%- endfor %>
     <%- endif %>
     outputs:
-      jobs: ${{ steps.define-jobs.outputs.jobs }}
-      runners: ${{ steps.runner-types.outputs.runners }}
       changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
-      os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
-      pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
-      testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
@@ -102,6 +98,11 @@ jobs:
       release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
       testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
+    env:
+      LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
+      FULL_TESTRUN_SLUGS: ${{ vars.FULL_TESTRUN_SLUGS }}
+      PR_TESTRUN_SLUGS: ${{ vars.PR_TESTRUN_SLUGS }}
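+      # Repository variables consumed by `tools ci workflow-config` below:
+      # the aarch64 runner label and, presumably, the slugs that select full
+      # vs. PR-scoped test runs.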
     steps:
       - uses: actions/checkout@v4
         with:
@@ -177,6 +178,9 @@ jobs:
                 - pkg/**
                 - *pkg_requirements
                 - *salt_added_modified
+            nsis_tests:
+              - added|modified: &nsis_tests
+                - pkg/windows/nsis/**
             testrun:
               - added|modified:
                 - *pkg_requirements
@@ -211,14 +215,6 @@ jobs:
           salt-version: "<{ prepare_workflow_salt_version_input }>"
           validate-version: true
 
-      - name: Get Pull Request Test Labels
-        id: get-pull-labels
-        if: ${{ github.event_name == 'pull_request'}}
-        env:
-            GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          tools ci get-pr-test-labels --repository ${{ github.repository }}
-
       - name: Get Hash For Nox Tarball Cache
         id: nox-archive-hash
         run: |
@@ -257,18 +253,6 @@ jobs:
         run: |
           echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
 
-      - name: Define Runner Types
-        id: runner-types
-        run: |
-          tools ci runner-types ${{ github.event_name }}
-
-      - name: Define Jobs To Run
-        id: define-jobs
-        run: |
-          tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{
-            prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
-          }> ${{ github.event_name }} changed-files.json
-
       - name: Get Salt Releases
         id: get-salt-releases
         env:
@@ -283,18 +267,20 @@ jobs:
         run: |
           tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
 
-      - name: Define Testrun
-        id: define-testrun
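+      # workflow-config consolidates the former runner-types, define-jobs and
+      # define-testrun steps into a single JSON `config` output.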
+      - name: Define workflow config
+        id: workflow-config
         run: |
-          tools ci define-testrun ${{ github.event_name }} changed-files.json
+          tools ci workflow-config<{ prepare_workflow_skip_test_suite }><{
+            prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
+          }> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
 
       - name: Check Contents of generated testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         run: |
           cat testrun-changed-files.txt || true
 
       - name: Upload testrun-changed-files.txt
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
         uses: actions/upload-artifact@v4
         with:
           name: testrun-changed-files.txt
@@ -308,18 +294,18 @@ jobs:
 {#  We can't yet use tokenless uploads with the codecov CLI
 
       - name: Install Codecov CLI
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['skip_code_coverage'] == false }}
         run: |
           python3 -m pip install codecov-cli
 
       - name: Save Commit Metadata In Codecov
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['skip_code_coverage'] == false }}
         run: |
           codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
             create-commit --git-service github --sha ${{ github.sha }}
 
       - name: Create Codecov Coverage Report
-        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
+        if: ${{ fromJSON(steps.workflow-config.outputs.config)['skip_code_coverage'] == false }}
         run: |
           codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
             create-report --git-service github --sha ${{ github.sha }}
@@ -330,3 +316,48 @@ jobs:
   <%- endif %>
 
 <%- endblock jobs %>
+  set-pipeline-exit-status:
+    # This job exists so GitHub branch protection can require a single check
+    # to pass on a pull request, instead of requiring every job individually.
+    name: Set the ${{ github.workflow }} Pipeline Exit Status
+    if: always()
+    runs-on: ubuntu-22.04
+    <%- if workflow_slug == "nightly" %>
+    environment: <{ workflow_slug }>
+    <%- endif %>
+    needs:
+      <%- for need in prepare_workflow_needs.iter(consume=True) %>
+      - <{ need }>
+      <%- endfor %>
+      <%- for need in conclusion_needs.iter(consume=True) %>
+      - <{ need }>
+      <%- endfor %>
+      <%- for need in test_salt_needs.iter(consume=False) %>
+      - <{ need }>
+      <%- endfor %>
+      <%- for need in test_salt_pkg_needs.iter(consume=False) %>
+      - <{ need }>
+      <%- endfor %>
+      <%- for need in test_repo_needs.iter(consume=True) %>
+      - <{ need }>
+      <%- endfor %>
+      <%- if workflow_slug != "release" %>
+      - test-packages
+      - test
+      <%- endif %>
+    steps:
+      - name: Get workflow information
+        id: get-workflow-info
+        uses: im-open/workflow-conclusion@v2
+
+      <%- block set_pipeline_exit_status_extra_steps %>
+      <%- endblock set_pipeline_exit_status_extra_steps %>
+
+      - name: Set Pipeline Exit Status
+        shell: bash
+        run: |
+          if [ "${{ steps.get-workflow-info.outputs.workflow_conclusion }}" != "success" ]; then
+            exit 1
+          else
+            exit 0
+          fi
diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja
index d51582bc3af..3bf88c07a95 100644
--- a/.github/workflows/templates/nightly.yml.jinja
+++ b/.github/workflows/templates/nightly.yml.jinja
@@ -1,5 +1,5 @@
 <%- set gh_environment = gh_environment|default("nightly") %>
-<%- set skip_test_coverage_check = skip_test_coverage_check|default("false") %>
+<%- set skip_test_coverage_check = skip_test_coverage_check|default("true") %>
 <%- set prepare_workflow_skip_test_suite = "${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}" %>
 <%- set prepare_workflow_skip_pkg_test_suite = "${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}" %>
 <%- set prepare_workflow_if_check = prepare_workflow_if_check|default("${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}") %>
@@ -51,217 +51,10 @@ concurrency:
   <%- include "workflow-requirements-check.yml.jinja" %>
   <%- include "trigger-branch-workflows.yml.jinja" %>
 
-  {#- When we start using a slack app, we can update messages, not while using incoming webhooks
-  <%- if workflow_slug == "nightly" %>
-
-  <%- do conclusion_needs.append('notify-slack') %>
-  notify-slack:
-    name: Notify Slack
-    runs-on: ubuntu-latest
-    environment: <{ gh_environment }>
-    needs:
-      <%- for need in prepare_workflow_needs.iter(consume=False) %>
-      - <{ need }>
-      <%- endfor %>
-    outputs:
-      update-ts: ${{ steps.slack.outputs.update-ts }}
-    steps:
-      - name: Notify Slack
-        id: slack
-        uses: slackapi/slack-github-action@v1.24.0
-        with:
-          payload: |
-            {
-              "attachments": [
-                {
-                  "color": "ffca28",
-                  "fields": [
-                    {
-                      "title": "Workflow",
-                      "short": true,
-                      "value": "${{ github.workflow }}",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Workflow Run",
-                      "short": true,
-                      "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Branch",
-                      "short": true,
-                      "value": "${{ github.ref_name }}",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Commit",
-                      "short": true,
-                      "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Attempt",
-                      "short": true,
-                      "value": "${{ github.run_attempt }}",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Status",
-                      "short": true,
-                      "value": "running",
-                      "type": "mrkdwn"
-                    }
-                  ],
-                  "author_name": "${{ github.event.sender.login }}",
-                  "author_link": "${{ github.event.sender.html_url }}",
-                  "author_icon": "${{ github.event.sender.avatar_url }}"
-                }
-              ]
-            }
-        env:
-          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
-
-  <%- endif %>
-  #}
-
 <%- endblock pre_jobs %>
 
 <%- block jobs %>
   <{- super() }>
 
-  <%- if includes.get("build-repos", True) %>
-  <%- include "build-repos.yml.jinja" %>
-  <%- endif %>
-
-  publish-repositories:
-    <%- do conclusion_needs.append('publish-repositories') %>
-    name: Publish Repositories
-    if: ${{ always() && ! failure() && ! cancelled() }}
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
-    environment: <{ gh_environment }>
-    needs:
-      - prepare-workflow
-      - build-docs
-      <%- for need in build_repo_needs.iter(consume=True) %>
-      - <{ need }>
-      <%- endfor %>
-      <%- if workflow_slug == "nightly" %>
-      <%- for need in test_salt_needs.iter(consume=True) %>
-      - <{ need }>
-      <%- endfor %>
-      <%- endif %>
-
-    steps:
-
-      - uses: actions/checkout@v4
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
-
-      - name: Download Repository Artifact
-        uses: actions/download-artifact@v4
-        with:
-          pattern: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-repo-*
-          merge-multiple: true
-          path: repo/
-
-      - name: Decompress Repository Artifacts
-        run: |
-          find repo/ -type f -name '*.tar.gz' -print -exec tar xvf {} \;
-          find repo/ -type f -name '*.tar.gz' -print -exec rm -f {} \;
-
-      - name: Show Repository
-        run: |
-          tree -a artifacts/pkgs/repo/
-
-      - name: Upload Repository Contents (<{ gh_environment }>)
-        env:
-          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
-          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
-        run: |
-          tools pkg repo publish <{ gh_environment }> --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/
 
 <%- endblock jobs %>
-
-<%- block set_pipeline_exit_status_extra_steps %>
-
-    <%- if workflow_slug == "nightly" %>
-
-      - name: Notify Slack
-        id: slack
-        if: always()
-        uses: slackapi/slack-github-action@v1.24.0
-        with:
-          {#- When we start using a slack app, we can update messages, not while using incoming webhooks
-          update-ts: ${{ needs.notify-slack.outputs.update-ts }}
-          #}
-          payload: |
-            {
-              "attachments": [
-                {
-                  "fallback": "${{ github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
-                  "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}",
-                  "fields": [
-                    {
-                      "title": "Workflow",
-                      "short": true,
-                      "value": "${{ github.workflow }}",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Workflow Run",
-                      "short": true,
-                      "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Branch",
-                      "short": true,
-                      "value": "${{ github.ref_name }}",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Commit",
-                      "short": true,
-                      "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Attempt",
-                      "short": true,
-                      "value": "${{ github.run_attempt }}",
-                      "type": "mrkdwn"
-                    },
-                    {
-                      "title": "Status",
-                      "short": true,
-                      "value": "${{ steps.get-workflow-info.outputs.conclusion }}",
-                      "type": "mrkdwn"
-                    }
-                  ],
-                  "author_name": "${{ github.event.sender.login }}",
-                  "author_link": "${{ github.event.sender.html_url }}",
-                  "author_icon": "${{ github.event.sender.avatar_url }}"
-                }
-              ]
-            }
-        env:
-          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
-
-    <%- endif %>
-
-<%- endblock set_pipeline_exit_status_extra_steps %>
diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja
index f32f502e554..1cdddae0e37 100644
--- a/.github/workflows/templates/release.yml.jinja
+++ b/.github/workflows/templates/release.yml.jinja
@@ -52,7 +52,7 @@ permissions:
   <{ job_name }>:
   <%- do prepare_workflow_needs.append(job_name) %>
     name: Check Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     environment: <{ gh_environment }>-check
     steps:
       - name: Check For Admin Permission
@@ -71,11 +71,9 @@ permissions:
   prepare-workflow:
     name: Prepare Workflow Run
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     env:
-      USE_S3_CACHE: 'true'
+      USE_S3_CACHE: 'false'
     environment: <{ gh_environment }>
     <%- if prepare_workflow_needs %>
     needs:
@@ -89,6 +87,7 @@ permissions:
       latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
       releases: ${{ steps.get-salt-releases.outputs.releases }}
       nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
+      config: ${{ steps.workflow-config.outputs.config }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -147,6 +146,14 @@ permissions:
         run: |
           echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT"
 
+      - name: Define workflow config
+        id: workflow-config
+        run: |
+          tools ci workflow-config<{ prepare_workflow_skip_test_suite }><{
+            prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
+          }> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
+
   <%- endblock prepare_workflow_job %>
   <%- endif %>
 
@@ -156,9 +163,7 @@ permissions:
   download-onedir-artifact:
     name: Download Staging Onedir Artifact
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     environment: <{ gh_environment }>
@@ -211,9 +216,7 @@ permissions:
   backup:
     name: Backup
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     needs:
       - prepare-workflow
     env:
@@ -245,9 +248,7 @@ permissions:
     <%- do conclusion_needs.append('publish-repositories') %>
     name: Publish Repositories
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     needs:
@@ -277,18 +278,12 @@ permissions:
         run: |
           tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
 
-  <%- if includes.get("test-pkg-downloads", True) %>
-  <%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
-  <%- endif %>
-
   release:
     <%- do conclusion_needs.append('release') %>
     name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
     if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     needs:
@@ -402,9 +397,7 @@ permissions:
     name: Restore Release Bucket From Backup
     if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }}
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     needs:
@@ -445,9 +438,7 @@ permissions:
       - restore #}
     environment: <{ gh_environment }>
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - linux-x86_64
     env:
       USE_S3_CACHE: 'true'
     steps:
diff --git a/.github/workflows/templates/scheduled.yml.jinja b/.github/workflows/templates/scheduled.yml.jinja
index 48ead7ee0f4..e2514161c01 100644
--- a/.github/workflows/templates/scheduled.yml.jinja
+++ b/.github/workflows/templates/scheduled.yml.jinja
@@ -1,5 +1,5 @@
 <%- set prepare_workflow_if_check = "${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}" %>
-<%- set skip_test_coverage_check = "false" %>
+<%- set skip_test_coverage_check = "true" %>
 <%- extends 'ci.yml.jinja' %>
 
 
diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja
index ae096e51e35..c823da809da 100644
--- a/.github/workflows/templates/staging.yml.jinja
+++ b/.github/workflows/templates/staging.yml.jinja
@@ -51,9 +51,9 @@ on:
 
 <%- block concurrency %>
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
-  cancel-in-progress: false
+#concurrency:
+#  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
+#  cancel-in-progress: false
 
 <%- endblock concurrency %>
 
@@ -65,7 +65,7 @@ concurrency:
   <{ job_name }>:
   <%- do prepare_workflow_needs.append(job_name) %>
     name: Check Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     environment: <{ gh_environment }>-check
     steps:
       - name: Check For Admin Permission
@@ -86,21 +86,12 @@ concurrency:
     needs:
       - prepare-workflow
       - build-docs
-      - build-src-repo
     environment: <{ gh_environment }>
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
       - name: Setup Python Tools Scripts
         uses: ./.github/actions/setup-python-tools-scripts
         with:
@@ -112,56 +103,16 @@ concurrency:
           name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
           path: artifacts/release
 
-      - name: Download Source Repository
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo
-          path: artifacts/release
-
       - name: Download Release Documentation (HTML)
         uses: actions/download-artifact@v4
         with:
           name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-docs-html.tar.xz
           path: artifacts/release
 
-      - name: Download Release Documentation (ePub)
-        uses: actions/download-artifact@v4
-        with:
-          name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub
-          path: artifacts/release
-
       - name: Show Release Artifacts
         run: |
           tree -a artifacts/release
 
-      {#-
-
-      - name: Download Release Documentation (PDF)
-        uses: actions/download-artifact@v4
-        with:
-          name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.pdf
-          path: artifacts/release
-
-      #}
-
-      - name: Upload Release Artifacts
-        run: |
-          tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
-
-      - name: Upload PyPi Artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: pypi-artifacts
-          path: |
-            artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-            artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
-          retention-days: 7
-          if-no-files-found: error
-
-  <%- if includes.get("test-pkg-downloads", True) %>
-  <%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
-  <%- endif %>
-
   publish-pypi:
     <%- do conclusion_needs.append('publish-pypi') %>
     name: Publish to PyPi(test)
@@ -180,9 +131,7 @@ concurrency:
       <%- endfor %>
     environment: <{ gh_environment }>
     runs-on:
-      - self-hosted
-      - linux
-      - repo-<{ gh_environment }>
+      - ubuntu-22.04
     steps:
       - uses: actions/checkout@v4
 
@@ -227,4 +176,29 @@ concurrency:
         run: |
           tools pkg pypi-upload --test artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
 
+  draft-release:
+    name: Draft GitHub Release
+    if: |
+      always() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
+      (needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') &&
+      needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' &&
+      needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success'
+    needs:
+      - prepare-workflow
+      - pre-commit
+      - build-salt-onedir
+      - build-pkgs-onedir
+      - test-packages
+      - test
+    permissions:
+      contents: write
+      pull-requests: read
+      id-token: write
+    uses: ./.github/workflows/draft-release.yml
+    with:
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['artifact-matrix']) }}
+      build-matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
+
 <%- endblock jobs %>
diff --git a/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja
index d547bd504db..0e9abacf1cb 100644
--- a/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja
+++ b/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja
@@ -6,13 +6,12 @@
     <%- do conclusion_needs.append(job_name) %>
     name: Package Downloads
     <%- if gh_environment == "staging" %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg-download'] }}
     <%- else %>
     if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
     <%- endif %>
     needs:
       - prepare-workflow
-      - publish-repositories
       - build-ci-deps
     <%- if gh_environment == "release" %>
       - download-onedir-artifact
diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja
index 5feb9bf3dbe..a8c327e92f0 100644
--- a/.github/workflows/templates/test-salt-pkg.yml.jinja
+++ b/.github/workflows/templates/test-salt-pkg.yml.jinja
@@ -1,99 +1,19 @@
-  <%- for os in test_salt_pkg_listing["linux"] %>
-    <%- set job_name = "{}-pkg-tests{}".format(os.slug.replace(".", ""), os.fips and '-fips' or '') %>
-
+  <%- set job_name = "test-packages" %>
   <{ job_name }>:
-    <%- do test_salt_pkg_needs.append(job_name) %>
-    name: <{ os.display_name }> Package Test<%- if os.fips %> (fips)<%- endif %>
-    <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
-    <%- endif %>
+    name: Test Package
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
     needs:
       - prepare-workflow
       - build-pkgs-onedir
       - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-linux.yml
+    uses: ./.github/workflows/test-packages-action.yml
     with:
-      distro-slug: <{ os.slug }>
       nox-session: ci-test-onedir
-      platform: linux
-      arch: <{ os.arch }>
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: <{ os.pkg_type }>
       nox-version: <{ nox_version  }>
       python-version: "<{ gh_actions_workflows_python_version }>"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
       skip-code-coverage: <{ skip_test_coverage_check }>
       testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-    <%- if os.fips %>
-      fips: true
-    <%- endif %>
-
-  <%- endfor %>
-
-
-
-  <%- for os in test_salt_pkg_listing["macos"] %>
-    <%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
-
-  <{ job_name }>:
-    <%- do test_salt_pkg_needs.append(job_name) %>
-    name: <{ os.display_name }> Package Test
-    <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
-    <%- endif %>
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-macos.yml
-    with:
-      distro-slug: <{ os.slug }>
-      runner: <{ os.runner }>
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: <{ os.arch }>
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: macos
-      nox-version: <{ nox_version  }>
-      python-version: "<{ gh_actions_workflows_python_version }>"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      skip-code-coverage: <{ skip_test_coverage_check }>
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  <%- endfor %>
-
-
-  <%- for os in test_salt_pkg_listing["windows"] %>
-    <%- set job_name = "{}-{}-pkg-tests".format(os.slug.replace(".", ""), os.pkg_type.lower()) %>
-
-  <{ job_name }>:
-    <%- do test_salt_pkg_needs.append(job_name) %>
-    name: <{ os.display_name }> <{ os.pkg_type }> Package Test
-    <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
-    <%- endif %>
-    needs:
-      - prepare-workflow
-      - build-pkgs-onedir
-      - build-ci-deps
-    uses: ./.github/workflows/test-packages-action-windows.yml
-    with:
-      distro-slug: <{ os.slug }>
-      nox-session: ci-test-onedir
-      platform: windows
-      arch: <{ os.arch }>
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      pkg-type: <{ os.pkg_type }>
-      nox-version: <{ nox_version  }>
-      python-version: "<{ gh_actions_workflows_python_version }>"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      skip-code-coverage: <{ skip_test_coverage_check }>
-      testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
-
-  <%- endfor %>
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
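+{#- Package tests now run as one matrix-driven job: pkg-test-matrix comes from
+    `tools ci workflow-config`, replacing the former per-OS jinja loops. -#}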
diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja
index 86dfd8c1b50..53a7e5f1e1e 100644
--- a/.github/workflows/templates/test-salt.yml.jinja
+++ b/.github/workflows/templates/test-salt.yml.jinja
@@ -1,103 +1,24 @@
-{#-
-  Full test runs. Each chunk should never take more than 2 hours. We allow 3, and on windows we add 30 more minutes.
-  Partial test runs(no chunk parallelization), 6 Hours
-#}
-<%- set full_testrun_timeout_value = 180 %>
-<%- set partial_testrun_timeout_value = 360 %>
-<%- set windows_full_testrun_timeout_value = full_testrun_timeout_value + 30 %>
-
-  <%- for os in test_salt_listing["windows"] %>
-
-  <{ os.slug.replace(".", "") }>:
-    <%- do test_salt_needs.append(os.slug.replace(".", "")) %>
-    name: <{ os.display_name }> Test
-    <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
-    <%- endif %>
+<%- if workflow_slug in ("nightly", "scheduled") %>
+  <%- set timeout_value = 360 %>
+<%- else %>
+  <%- set timeout_value = 180 %>
+<%- endif %>
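+{#- Nightly and scheduled runs execute the full suite, so they keep the longer
+    360-minute timeout; other workflows default to 180 minutes. -#}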
+  test:
+    name: Test Salt
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
     needs:
       - prepare-workflow
       - build-ci-deps
-    uses: ./.github/workflows/test-action-windows.yml
+    uses: ./.github/workflows/test-action.yml
     with:
-      distro-slug: <{ os.slug }>
       nox-session: ci-test-onedir
-      platform: windows
-      arch: amd64
       nox-version: <{ nox_version  }>
-      gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
+      python-version: "<{ gh_actions_workflows_python_version }>"
+      testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
       skip-code-coverage: <{ skip_test_coverage_check }>
       workflow-slug: <{ workflow_slug }>
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ windows_full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
-
-  <%- endfor %>
-
-
-  <%- for os in test_salt_listing["macos"] %>
-
-  <{ os.slug.replace(".", "") }>:
-    <%- do test_salt_needs.append(os.slug.replace(".", "")) %>
-    name: <{ os.display_name }> Test
-    <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
-    <%- endif %>
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-macos.yml
-    with:
-      distro-slug: <{ os.slug }>
-      runner: <{ os.runner }>
-      nox-session: ci-test-onedir
-      platform: macos
-      arch: <{ os.arch }>
-      nox-version: <{ nox_version  }>
-      gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      skip-code-coverage: <{ skip_test_coverage_check }>
-      workflow-slug: <{ workflow_slug }>
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
-
-  <%- endfor %>
-
-  <%- for os in test_salt_listing["linux"] %>
-    <%- set job_name = "{}{}".format(os.slug.replace(".", ""), os.fips and '-fips' or '') %>
-
-  <{ job_name }>:
-    <%- do test_salt_needs.append(job_name) %>
-    name: <{ os.display_name }> Test<%- if os.fips %> (fips)<%- endif %>
-    <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
-    <%- else %>
-    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
-    <%- endif %>
-    needs:
-      - prepare-workflow
-      - build-ci-deps
-    uses: ./.github/workflows/test-action-linux.yml
-    with:
-      distro-slug: <{ os.slug }>
-      nox-session: ci-test-onedir
-      platform: linux
-      arch: <{ os.arch }>
-      nox-version: <{ nox_version  }>
-      gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
-      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
-      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      skip-code-coverage: <{ skip_test_coverage_check }>
-      workflow-slug: <{ workflow_slug }>
-      timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
-    <%- if os.fips %>
-      fips: true
-    <%- endif %>
-
-  <%- endfor %>
+      default-timeout: <{ timeout_value }>
+      matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
+      linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
diff --git a/.github/workflows/templates/trigger-branch-workflows.yml.jinja b/.github/workflows/templates/trigger-branch-workflows.yml.jinja
index de23643bd23..11cb57f6828 100644
--- a/.github/workflows/templates/trigger-branch-workflows.yml.jinja
+++ b/.github/workflows/templates/trigger-branch-workflows.yml.jinja
@@ -6,7 +6,7 @@
   <%- do conclusion_needs.append(job_name) %>
     name: Trigger Branch Workflows
     if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     needs:
       - workflow-requirements
 
diff --git a/.github/workflows/templates/workflow-requirements-check.yml.jinja b/.github/workflows/templates/workflow-requirements-check.yml.jinja
index 67e04eef3e7..9458fe0423a 100644
--- a/.github/workflows/templates/workflow-requirements-check.yml.jinja
+++ b/.github/workflows/templates/workflow-requirements-check.yml.jinja
@@ -4,7 +4,7 @@
   <{ job_name }>:
   <%- do prepare_workflow_needs.append(job_name) %>
     name: Check Workflow Requirements
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     outputs:
       requirements-met: ${{ steps.check-requirements.outputs.requirements-met }}
     steps:
diff --git a/.github/workflows/test-action-linux.yml b/.github/workflows/test-action-linux.yml
deleted file mode 100644
index 3a25fe5f57f..00000000000
--- a/.github/workflows/test-action-linux.yml
+++ /dev/null
@@ -1,402 +0,0 @@
----
-name: Test Artifact
-
-on:
-  workflow_call:
-    inputs:
-      distro-slug:
-        required: true
-        type: string
-        description: The OS slug to run tests against
-      nox-session:
-        required: true
-        type: string
-        description: The nox session to run
-      testrun:
-        required: true
-        type: string
-        description: JSON string containing information about what and how to run the test suite
-      salt-version:
-        type: string
-        required: true
-        description: The Salt version to set prior to running tests.
-      cache-prefix:
-        required: true
-        type: string
-        description: Seed used to invalidate caches
-      platform:
-        required: true
-        type: string
-        description: The platform being tested
-      arch:
-        required: true
-        type: string
-        description: The platform arch being tested
-      nox-version:
-        required: true
-        type: string
-        description: The nox version to install
-      timeout-minutes:
-        required: true
-        type: number
-        description: Timeout, in minutes, for the test job
-      gh-actions-python-version:
-        required: false
-        type: string
-        description: The python version to run tests with
-        default: "3.10"
-      fips:
-        required: false
-        type: boolean
-        default: false
-        description: Test run with FIPS enabled
-      package-name:
-        required: false
-        type: string
-        description: The onedir package name to use
-        default: salt
-      skip-code-coverage:
-        required: false
-        type: boolean
-        description: Skip code coverage
-        default: false
-      workflow-slug:
-        required: false
-        type: string
-        description: Which workflow is running.
-        default: ci
-
-env:
-  COLUMNS: 190
-  AWS_MAX_ATTEMPTS: "10"
-  AWS_RETRY_MODE: "adaptive"
-  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
-  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
-  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
-
-jobs:
-
-  generate-matrix:
-    name: Test Matrix
-    runs-on: ubuntu-latest
-    outputs:
-      matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
-      build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
-
-      - name: Generate Test Matrix
-        id: generate-matrix
-        run: |
-          tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }}
-
-  test:
-    name: Test
-    runs-on:
-      - self-hosted
-      - linux
-      - bastion
-    timeout-minutes: ${{ inputs.timeout-minutes }}
-    needs:
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
-    env:
-      SALT_TRANSPORT: ${{ matrix.transport }}
-      TEST_GROUP: ${{ matrix.test-group || 1 }}
-
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: "Set `TIMESTAMP` environment variable"
-        shell: bash
-        run: |
-          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Salt Version
-        run: |
-          echo "${{ inputs.salt-version }}" > salt/_version.txt
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Decompress Onedir Tarball
-        shell: bash
-        run: |
-          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
-          cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-
-      - name: Download nox.linux.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
-        uses: actions/download-artifact@v4
-        with:
-          name: nox-linux-${{ inputs.arch }}-${{ inputs.nox-session }}
-
-      - name: PyPi Proxy
-        run: |
-          sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Download testrun-changed-files.txt
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
-        uses: actions/download-artifact@v4
-        with:
-          name: testrun-changed-files.txt
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Start VM
-        id: spin-up-vm
-        env:
-          TESTS_CHUNK: ${{ matrix.tests-chunk }}
-        run: |
-          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
-
-      - name: List Free Space
-        run: |
-          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
-
-      - name: Upload Checkout To VM
-        run: |
-          tools --timestamps vm rsync ${{ inputs.distro-slug }}
-
-      - name: Decompress .nox Directory
-        run: |
-          tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
-
-      - name: Show System Info
-        run: |
-          tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
-            --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }}
-
-      - name: Run Changed Tests
-        id: run-fast-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --core-tests --slow-tests --suppress-no-test-exit-code \
-            --from-filenames=testrun-changed-files.txt
-
-      - name: Run Fast Tests
-        id: run-fast-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
-            ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
-
-      - name: Run Slow Tests
-        id: run-slow-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
-
-      - name: Run Core Tests
-        id: run-core-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
-
-      - name: Run Flaky Tests
-        id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
-
-      - name: Run Full Tests
-        id: run-full-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
-            -E TEST_GROUP ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
-            --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
-
-      - name: Combine Coverage Reports
-        if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
-        run: |
-          tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
-
-      - name: Download Test Run Artifacts
-        id: download-artifacts-from-vm
-        if: always() && steps.spin-up-vm.outcome == 'success'
-        run: |
-          tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
-          # Delete the salt onedir, we won't need it anymore and it will prevent
-          # from it showing in the tree command below
-          rm -rf artifacts/salt*
-          tree -a artifacts
-          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
-            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}${{ inputs.fips && '.fips' || '' }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
-          fi
-
-      - name: Destroy VM
-        if: always()
-        run: |
-          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
-
-      - name: Upload Code Coverage Test Run Artifacts
-        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/coverage/
-
-      - name: Upload JUnit XML Test Run Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/xml-unittests-output/
-
-      - name: Upload Test Run Log Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/logs
-
-  report:
-    name: Test Reports
-    if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-    runs-on: ubuntu-latest
-    needs:
-      - test
-      - generate-matrix
-    env:
-      PIP_INDEX_URL: https://pypi.org/simple
-
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Merge JUnit XML Test Run Artifacts
-        if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
-          pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Merge Log Test Run Artifacts
-        if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
-          pattern: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Merge Code Coverage Test Run Artifacts
-        if: ${{ inputs.skip-code-coverage == false }}
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
-          pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Download Code Coverage Test Run Artifacts
-        uses: actions/download-artifact@v4
-        if: ${{ inputs.skip-code-coverage == false }}
-        id: download-coverage-artifacts
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
-          path: artifacts/coverage/
-
-      - name: Show Downloaded Test Run Artifacts
-        if: ${{ inputs.skip-code-coverage == false }}
-        run: |
-          tree -a artifacts
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ inputs.nox-version }}'
-
-      - name: Create XML Coverage Reports
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
-        run: |
-          nox --force-color -e create-xml-coverage-reports
-          mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}${{ inputs.fips && '..fips' || '' }}..${{ inputs.nox-session }}.xml
-          mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}${{ inputs.fips && '..fips' || '' }}..${{ inputs.nox-session }}.xml
-
-      - name: Report Salt Code Coverage
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          nox --force-color -e report-coverage -- salt
-
-      - name: Report Combined Code Coverage
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          nox --force-color -e report-coverage
-
-      - name: Rename Code Coverage DB
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}${{ inputs.fips && '.fips' || '' }}.${{ inputs.nox-session }}
-
-      - name: Upload Code Coverage DB
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
-          path: artifacts/coverage
diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml
deleted file mode 100644
index 38bf4204a9b..00000000000
--- a/.github/workflows/test-action-macos.yml
+++ /dev/null
@@ -1,437 +0,0 @@
----
-name: Test Artifact(macOS)
-
-on:
-  workflow_call:
-    inputs:
-      distro-slug:
-        required: true
-        type: string
-        description: The OS slug to run tests against
-      runner:
-        required: true
-        type: string
-        description: The GitHub runner name
-      nox-session:
-        required: true
-        type: string
-        description: The nox session to run
-      testrun:
-        required: true
-        type: string
-        description: JSON string containing information about what and how to run the test suite
-      gh-actions-python-version:
-        required: false
-        type: string
-        description: The python version to run tests with
-        default: "3.11"
-      salt-version:
-        type: string
-        required: true
-        description: The Salt version to set prior to running tests.
-      cache-prefix:
-        required: true
-        type: string
-        description: Seed used to invalidate caches
-      platform:
-        required: true
-        type: string
-        description: The platform being tested
-      arch:
-        required: true
-        type: string
-        description: The platform arch being tested
-      nox-version:
-        required: true
-        type: string
-        description: The nox version to install
-      timeout-minutes:
-        required: true
-        type: number
-        description: Timeout, in minutes, for the test job
-      package-name:
-        required: false
-        type: string
-        description: The onedir package name to use
-        default: salt
-      skip-code-coverage:
-        required: false
-        type: boolean
-        description: Skip code coverage
-        default: false
-      workflow-slug:
-        required: false
-        type: string
-        description: Which workflow is running.
-        default: ci
-
-env:
-  COLUMNS: 190
-  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
-  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
-  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
-
-jobs:
-
-  generate-matrix:
-    name: Test Matrix
-    runs-on: ubuntu-latest
-    outputs:
-      matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
-      build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
-
-      - name: Generate Test Matrix
-        id: generate-matrix
-        run: |
-          tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ inputs.distro-slug }}
-
-  test:
-    name: Test
-    runs-on: ${{ inputs.runner }}
-    timeout-minutes: ${{ inputs.timeout-minutes }}
-    needs:
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
-    env:
-      SALT_TRANSPORT: ${{ matrix.transport }}
-
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: "Set `TIMESTAMP` environment variable"
-        shell: bash
-        run: |
-          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Salt Version
-        run: |
-          echo "${{ inputs.salt-version }}" > salt/_version.txt
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Decompress Onedir Tarball
-        shell: bash
-        run: |
-          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
-          cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-
-      - name: Install System Dependencies
-        run: |
-          brew install tree
-
-      - name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
-        uses: actions/download-artifact@v4
-        with:
-          name: nox-macos-${{ inputs.arch }}-${{ inputs.nox-session }}
-
-      - name: Set up Python ${{ inputs.gh-actions-python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.gh-actions-python-version }}"
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ inputs.nox-version }}'
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
-
-      - name: Decompress .nox Directory
-        run: |
-          nox --force-color -e decompress-dependencies -- macos ${{ inputs.arch }}
-
-      - name: Download testrun-changed-files.txt
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
-        uses: actions/download-artifact@v4
-        with:
-          name: testrun-changed-files.txt
-
-      - name: Show System Info
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_SYSTEM_INFO_ONLY: "1"
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
-
-      - name: Run Changed Tests
-        id: run-fast-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-            -k "mac or darwin" --core-tests --slow-tests --suppress-no-test-exit-code \
-            --from-filenames=testrun-changed-files.txt
-
-      - name: Run Fast Tests
-        id: run-fast-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-            -k "mac or darwin" --suppress-no-test-exit-code
-
-      - name: Run Slow Tests
-        id: run-slow-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-            -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests
-
-      - name: Run Core Tests
-        id: run-core-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-            -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests
-
-      - name: Run Flaky Tests
-        id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-            -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
-
-      - name: Run Full Tests
-        id: run-full-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-            --slow-tests --core-tests -k "mac or darwin"
-
-      - name: Fix file ownership
-        run: |
-          sudo chown -R "$(id -un)" .
-
-      - name: Combine Coverage Reports
-        if: always() && inputs.skip-code-coverage == false
-        run: |
-          nox --force-color -e combine-coverage
-
-      - name: Prepare Test Run Artifacts
-        id: download-artifacts-from-vm
-        if: always()
-        run: |
-          # Delete the salt onedir, we won't need it anymore and it will prevent
-          # from it showing in the tree command below
-          rm -rf artifacts/salt*
-          tree -a artifacts
-          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
-            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
-          fi
-
-      - name: Upload Code Coverage Test Run Artifacts
-        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/coverage/
-
-      - name: Upload JUnit XML Test Run Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/xml-unittests-output/
-
-      - name: Upload Test Run Log Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/logs
-
-  report:
-    name: Test Reports
-    if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-    runs-on: ubuntu-latest
-    needs:
-      - test
-      - generate-matrix
-    env:
-      PIP_INDEX_URL: https://pypi.org/simple
-
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Merge JUnit XML Test Run Artifacts
-        if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Merge Log Test Run Artifacts
-        if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          pattern: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Merge Code Coverage Test Run Artifacts
-        if: ${{ inputs.skip-code-coverage == false }}
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Download Code Coverage Test Run Artifacts
-        uses: actions/download-artifact@v4
-        if: ${{ inputs.skip-code-coverage == false }}
-        id: download-coverage-artifacts
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          path: artifacts/coverage/
-
-      - name: Show Downloaded Test Run Artifacts
-        if: ${{ inputs.skip-code-coverage == false }}
-        run: |
-          tree -a artifacts
-
-      - name: Set up Python ${{ inputs.gh-actions-python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.gh-actions-python-version }}"
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ inputs.nox-version }}'
-
-      - name: Create XML Coverage Reports
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
-        run: |
-          nox --force-color -e create-xml-coverage-reports
-          mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
-          mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
-
-      - name: Report Salt Code Coverage
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          nox --force-color -e report-coverage -- salt
-
-      - name: Report Combined Code Coverage
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          nox --force-color -e report-coverage
-
-      - name: Rename Code Coverage DB
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
-
-      - name: Upload Code Coverage DB
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
-          path: artifacts/coverage
diff --git a/.github/workflows/test-action-windows.yml b/.github/workflows/test-action-windows.yml
deleted file mode 100644
index 4110a3a644d..00000000000
--- a/.github/workflows/test-action-windows.yml
+++ /dev/null
@@ -1,403 +0,0 @@
----
-name: Test Artifact
-
-on:
-  workflow_call:
-    inputs:
-      distro-slug:
-        required: true
-        type: string
-        description: The OS slug to run tests against
-      nox-session:
-        required: true
-        type: string
-        description: The nox session to run
-      testrun:
-        required: true
-        type: string
-        description: JSON string containing information about what and how to run the test suite
-      salt-version:
-        type: string
-        required: true
-        description: The Salt version to set prior to running tests.
-      cache-prefix:
-        required: true
-        type: string
-        description: Seed used to invalidate caches
-      platform:
-        required: true
-        type: string
-        description: The platform being tested
-      arch:
-        required: true
-        type: string
-        description: The platform arch being tested
-      nox-version:
-        required: true
-        type: string
-        description: The nox version to install
-      timeout-minutes:
-        required: true
-        type: number
-        description: Timeout, in minutes, for the test job
-      gh-actions-python-version:
-        required: false
-        type: string
-        description: The python version to run tests with
-        default: "3.10"
-      fips:
-        required: false
-        type: boolean
-        default: false
-        description: Test run with FIPS enabled
-      package-name:
-        required: false
-        type: string
-        description: The onedir package name to use
-        default: salt
-      skip-code-coverage:
-        required: false
-        type: boolean
-        description: Skip code coverage
-        default: false
-      workflow-slug:
-        required: false
-        type: string
-        description: Which workflow is running.
-        default: ci
-
-env:
-  COLUMNS: 190
-  AWS_MAX_ATTEMPTS: "10"
-  AWS_RETRY_MODE: "adaptive"
-  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
-  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
-  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
-
-jobs:
-
-  generate-matrix:
-    name: Test Matrix
-    runs-on: ubuntu-latest
-    outputs:
-      matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
-      build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
-
-      - name: Generate Test Matrix
-        id: generate-matrix
-        run: |
-          tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }}
-
-  test:
-    name: Test
-    runs-on:
-      - self-hosted
-      - linux
-      - bastion
-    timeout-minutes: ${{ inputs.timeout-minutes }}
-    needs:
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
-    env:
-      SALT_TRANSPORT: ${{ matrix.transport }}
-      TEST_GROUP: ${{ matrix.test-group || 1 }}
-
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: "Set `TIMESTAMP` environment variable"
-        shell: bash
-        run: |
-          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Salt Version
-        run: |
-          echo "${{ inputs.salt-version }}" > salt/_version.txt
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Decompress Onedir Tarball
-        shell: bash
-        run: |
-          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
-          cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-
-      - name: Download nox.windows.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
-        uses: actions/download-artifact@v4
-        with:
-          name: nox-windows-${{ inputs.arch }}-${{ inputs.nox-session }}
-
-      - name: PyPi Proxy
-        run: |
-          sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Download testrun-changed-files.txt
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
-        uses: actions/download-artifact@v4
-        with:
-          name: testrun-changed-files.txt
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Start VM
-        id: spin-up-vm
-        env:
-          TESTS_CHUNK: ${{ matrix.tests-chunk }}
-        run: |
-          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
-
-      - name: List Free Space
-        run: |
-          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
-
-      - name: Upload Checkout To VM
-        run: |
-          tools --timestamps vm rsync ${{ inputs.distro-slug }}
-
-      - name: Decompress .nox Directory
-        run: |
-          tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
-
-      - name: Show System Info
-        run: |
-          tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
-            --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }}
-
-      - name: Run Changed Tests
-        id: run-fast-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --core-tests --slow-tests --suppress-no-test-exit-code \
-            --from-filenames=testrun-changed-files.txt
-
-      - name: Run Fast Tests
-        id: run-fast-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
-            ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
-
-      - name: Run Slow Tests
-        id: run-slow-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
-
-      - name: Run Core Tests
-        id: run-core-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
-
-      - name: Run Flaky Tests
-        id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
-
-      - name: Run Full Tests
-        id: run-full-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-            --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
-            -E TEST_GROUP ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
-            --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
-
-      - name: Combine Coverage Reports
-        if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
-        run: |
-          tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
-
-      - name: Download Test Run Artifacts
-        id: download-artifacts-from-vm
-        if: always() && steps.spin-up-vm.outcome == 'success'
-        run: |
-          tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
-          # Delete the salt onedir, we won't need it anymore and it will prevent
-          # from it showing in the tree command below
-          rm -rf artifacts/salt*
-          tree -a artifacts
-          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
-            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
-          fi
-
-      - name: Destroy VM
-        if: always()
-        run: |
-          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
-
-      - name: Upload Code Coverage Test Run Artifacts
-        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/coverage/
-
-      - name: Upload JUnit XML Test Run Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/xml-unittests-output/
-
-      - name: Upload Test Run Log Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts/logs
-
-
-  report:
-    name: Test Reports
-    if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-    runs-on: ubuntu-latest
-    needs:
-      - test
-      - generate-matrix
-    env:
-      PIP_INDEX_URL: https://pypi.org/simple
-
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Merge JUnit XML Test Run Artifacts
-        if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Merge Log Test Run Artifacts
-        if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          pattern: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Merge Code Coverage Test Run Artifacts
-        if: ${{ inputs.skip-code-coverage == false }}
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
-          separate-directories: false
-          delete-merged: true
-
-      - name: Download Code Coverage Test Run Artifacts
-        uses: actions/download-artifact@v4
-        if: ${{ inputs.skip-code-coverage == false }}
-        id: download-coverage-artifacts
-        with:
-          name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
-          path: artifacts/coverage/
-
-      - name: Show Downloaded Test Run Artifacts
-        if: ${{ inputs.skip-code-coverage == false }}
-        run: |
-          tree -a artifacts
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ inputs.nox-version }}'
-
-      - name: Create XML Coverage Reports
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
-        run: |
-          nox --force-color -e create-xml-coverage-reports
-          mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
-          mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
-
-      - name: Report Salt Code Coverage
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          nox --force-color -e report-coverage -- salt
-
-      - name: Report Combined Code Coverage
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          nox --force-color -e report-coverage
-
-      - name: Rename Code Coverage DB
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        continue-on-error: true
-        run: |
-          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
-
-      - name: Upload Code Coverage DB
-        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
-          path: artifacts/coverage
diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml
new file mode 100644
index 00000000000..6ac1fdccba2
--- /dev/null
+++ b/.github/workflows/test-action.yml
@@ -0,0 +1,1392 @@
+---
+name: Test Artifact
+
+on:
+  workflow_call:
+    inputs:
+      nox-session:
+        required: true
+        type: string
+        description: The nox session to run
+      testrun:
+        required: true
+        type: string
+        description: JSON string containing information about what and how to run the test suite
+      python-version:
+        required: false
+        type: string
+        description: The python version to run tests with
+        default: "3.10"
+      salt-version:
+        type: string
+        required: true
+        description: The Salt version to set prior to running tests.
+      cache-prefix:
+        required: true
+        type: string
+        description: Seed used to invalidate caches
+      nox-version:
+        required: true
+        type: string
+        description: The nox version to install
+      package-name:
+        required: false
+        type: string
+        description: The onedir package name to use
+        default: salt
+      skip-code-coverage:
+        required: false
+        type: boolean
+        description: Skip code coverage
+        default: false
+      workflow-slug:
+        required: false
+        type: string
+        description: Which workflow is running.
+        default: ci
+      default-timeout:
+        required: false
+        type: number
+        description: Timeout, in minutes, for the test job (default 360, i.e. 6 hours).
+        default: 360
+      matrix:
+        required: true
+        type: string
+        description: JSON job matrix config
+      linux_arm_runner:
+        required: true
+        type: string
+        description: Runner label to use for Linux arm64 jobs
+
+env:
+  COLUMNS: 190
+  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+  PIP_DISABLE_PIP_VERSION_CHECK: "1"
+  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+
+jobs:
+
+  test-linux:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
+    runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
+    if: toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]'
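+    # Skip the whole job when the generated linux-x86_64 matrix is empty;
+    # comparing the re-serialized matrix with '[]' serves as the emptiness
+    # check, as workflow expressions have no array length function.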
+    # Full test runs: each chunk should never take more than 2 hours.
+    # Partial test runs (no chunk parallelization): 6 hours.
+    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
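+    # Note: `cond && a || b` is the usual expression-level ternary; it falls
+    # back to `b` whenever `a` is falsy, which is safe here because
+    # default-timeout is always a positive number.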
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux-x86_64'] }}
+    steps:
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Free Disk Space Before Build
+        run: |
+          echo "Disk space before cleanup:"
+          df -h
+          sudo rm -rf /usr/local/.ghcup
+          sudo rm -rf /opt/hostedtoolcache/CodeQL
+          sudo rm -rf /usr/local/lib/android/sdk/ndk
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf /usr/local/share/boost
+          sudo apt-get clean
+          echo "Disk space after cleanup:"
+          df -h
+
+      - name: Setup Salt Version
+        run: |
+          echo "${{ inputs.salt-version }}" > salt/_version.txt
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: "Configure docker"
+        run: |
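+          # Cap containerd's open-file limit: LimitNOFILE=infinity can resolve
+          # to a huge value inside containers, which (we assume) slows tests down.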
+          sudo sed -i '/LimitNOFILE=infinity/c\LimitNOFILE=1048576' /lib/systemd/system/containerd.service
+          sudo systemctl daemon-reload
+          sudo systemctl restart containerd
+          cat /lib/systemd/system/containerd.service
+
+      - name: "Pull container ${{ matrix.container }}"
+        run: |
+          docker pull ${{ matrix.container }}
+
+      - name: "Create docker network"
+        run: |
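+          # IPv6-enabled network on the 2001:db8::/64 documentation prefix so
+          # tests can exercise IPv6 code paths.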
+          docker network create -o "com.docker.network.driver.mtu=1500" --ipv6 --subnet 2001:db8::/64 ip6net
+
+      - name: "Host network config"
+        run: |
+          ip a
+
+      - name: Free Memory Before Container
+        shell: bash
+        run: |
+          free -h
+
+      - name: "Create container ${{ matrix.container }}"
+        run: |
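+          # Boot systemd as PID 1 inside the container (only rescue.target), so
+          # tests that manage services have a real service manager to talk to.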
+          /usr/bin/docker \
+          create --name ${{ github.run_id }}_salt-test \
+          --workdir /__w/salt/salt \
+          --privileged \
+          --ulimit="nofile=262144:262144" \
+          -e "HOME=/github/home" \
+          -e GITHUB_ACTIONS=true \
+          -e CI=true \
+          -e SKIP_REQUIREMENTS_INSTALL=1 \
+          -e PRINT_TEST_SELECTION=0 \
+          -e PRINT_TEST_PLAN_ONLY=0 \
+          -e PRINT_SYSTEM_INFO=0 \
+          -e RERUN_FAILURES=1 \
+          -e GITHUB_ACTIONS_PIPELINE=1 \
+          -e SKIP_INITIAL_ONEDIR_FAILURES=1 \
+          -e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \
+          -e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \
+          -e COVERAGE_CONTEXT=${{ matrix.slug }} \
+          -e COLUMNS=190 \
+          -e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \
+          -e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \
+          -e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \
+          -e PIP_DISABLE_PIP_VERSION_CHECK="1" \
+          -e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \
+          -e SALT_TRANSPORT=${{ matrix.transport }} \
+          -e LANG="en_US.UTF-8" \
+          -e SHELL=/bin/bash \
+          -v "/home/runner/work":"/__w" \
+          -v "/tmp/":"/var/lib/docker" \
+          --entrypoint "/usr/lib/systemd/systemd" \
+          ${{ matrix.container }} \
+          --systemd --unit rescue.target
+
+      - name: "Start container ${{ matrix.container }}"
+        run: |
+          /usr/bin/docker start ${{ github.run_id }}_salt-test
+
+      - name: "Show container inspect ${{ matrix.container }}"
+        run: |
+          /usr/bin/docker inspect ${{ github.run_id }}_salt-test
+
+      - name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - name: Decompress .nox Directory
+        run: |
+          docker exec ${{ github.run_id}}_salt-test python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }}
+
+      - name: Download testrun-changed-files.txt
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        uses: actions/download-artifact@v4
+        with:
+          name: testrun-changed-files.txt
+
+      - name: Current Directory
+        run: |
+          pwd
+
+      - name: Show System Info
+        run: |
+          docker exec -e SKIP_REQUIREMENTS_INSTALL=1 -e PRINT_SYSTEM_INFO_ONLY=1 ${{ github.run_id}}_salt-test \
+          python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
+
+      - name: Free Space on Host
+        shell: bash
+        run: |
+          df -h
+
+      - name: Show container network info
+        shell: bash
+        run: |
+          docker exec ${{ github.run_id}}_salt-test ip addr
+
+      - name: Show container processes
+        shell: bash
+        run: |
+          docker exec ${{ github.run_id}}_salt-test ps auxf
+
+      - name: Free Space on Container
+        shell: bash
+        run: |
+          docker exec ${{ github.run_id}}_salt-test df -h
+
+      - name: Free Memory
+        shell: bash
+        run: |
+          free -h
+
+      - name: Configure apparmor
+        run: |
+          # Apparmor's unix-chkpwd profile gets in the way of tests needing to
+          # authenticate from inside a container.
+          cat <<'EOF' | sudo tee /etc/apparmor.d/unix-chkpwd
+          abi <abi/4.0>,
+          include <tunables/global>
+          profile unix-chkpwd /{,usr/}{,s}bin/unix_chkpwd flags=(unconfined) {
+            include <abstractions/base>
+            include <abstractions/nameservice>
+            # To write records to the kernel auditing log.
+            capability audit_write,
+            network netlink raw,
+            /{,usr/}{,s}bin/unix_chkpwd mr,
+            /etc/shadow r,
+            # systemd userdb, used in nspawn
+            /run/host/userdb/*.user r,
+            /run/host/userdb/*.user-privileged r,
+            # file_inherit
+            owner /dev/tty[0-9]* rw,
+            include if exists <local/unix-chkpwd>
+          }
+          EOF
+          sudo systemctl restart apparmor
+          sudo aa-status
+
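+      # The steps below pick the test selection for this run: partial runs
+      # execute the changed tests plus any selected fast/slow/core/flaky
+      # groups, while a full run executes the whole chunk in one pass.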
+      - name: Run Changed Tests
+        id: run-fast-changed-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --core-tests --slow-tests --suppress-no-test-exit-code --from-filenames=testrun-changed-files.txt
+
+      - name: Run Fast Tests
+        id: run-fast-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code
+
+      - name: Run Slow Tests
+        id: run-slow-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code --no-fast-tests --slow-tests
+
+      - name: Run Core Tests
+        id: run-core-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code --no-fast-tests --core-tests
+
+      - name: Run Flaky Tests
+        id: run-flaky-tests
+        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code --no-fast-tests --flaky-jail
+
+      - name: Run Full Tests
+        id: run-full-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --slow-tests --core-tests --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
+
+      - name: Stop Container
+        run: |
+          docker container stop ${{ github.run_id}}_salt-test
+
+      - name: Remove Container
+        run: |
+          docker container rm ${{ github.run_id}}_salt-test
+
+      - name: Remove Container Image
+        run: |
+          docker image rm ${{ matrix.container }}
+
+      - name: Fix file ownership
+        run: |
+          sudo chown -R "$(id -un)" .
+
+      - name: Combine Coverage Reports
+        if: always() && inputs.skip-code-coverage == false
+        run: |
+          nox --force-color -e combine-coverage
+
+      - name: Prepare Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always()
+        run: |
+          # Delete the salt onedir; we won't need it anymore, and removing it
+          # keeps it from showing up in the tree listing below.
+          sudo rm -rf artifacts/salt*
+          tree -a artifacts
+          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
+            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
+          fi
+
+      - name: Upload Code Coverage Test Run Artifacts
+        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/coverage/
+          include-hidden-files: true
+
+      - name: Upload JUnit XML Test Run Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-junit-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/xml-unittests-output/
+          include-hidden-files: true
+
+      - name: Upload Test Run Log Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
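+  # NOTE: this job mirrors test-linux above; only the matrix slice and the
+  # runner selection differ.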
+  test-linux-arm64:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
+    runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-22.04' || inputs.linux_arm_runner }}
+    if: toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]'
+    # Full test runs: each chunk should never take more than 2 hours.
+    # Partial test runs (no chunk parallelization): 6 hours.
+    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux-arm64'] }}
+    steps:
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Free Disk Space Before Build
+        run: |
+          echo "Disk space before cleanup:"
+          df -h
+          sudo rm -rf /usr/local/.ghcup
+          sudo rm -rf /opt/hostedtoolcache/CodeQL
+          sudo rm -rf /usr/local/lib/android/sdk/ndk
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf /usr/local/share/boost
+          sudo apt-get clean
+          echo "Disk space after cleanup:"
+          df -h
+
+      - name: Setup Salt Version
+        run: |
+          echo "${{ inputs.salt-version }}" > salt/_version.txt
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: "Configure docker"
+        run: |
+          sudo sed -i '/LimitNOFILE=infinity/c\LimitNOFILE=1048576' /lib/systemd/system/containerd.service
+          sudo systemctl daemon-reload
+          sudo systemctl restart containerd
+          cat /lib/systemd/system/containerd.service
+
+      - name: "Pull container ${{ matrix.container }}"
+        run: |
+          docker pull ${{ matrix.container }}
+
+      - name: "Create docker network"
+        run: |
+          docker network create -o "com.docker.network.driver.mtu=1500" --ipv6 --subnet 2001:db8::/64 ip6net
+
+      - name: "Host network config"
+        run: |
+          ip a
+
+      - name: Free Memory Before Container
+        shell: bash
+        run: |
+          free -h
+
+      - name: "Create container ${{ matrix.container }}"
+        run: |
+          /usr/bin/docker \
+          create --name ${{ github.run_id }}_salt-test \
+          --workdir /__w/salt/salt \
+          --privileged \
+          --ulimit="nofile=262144:262144" \
+          -e "HOME=/github/home" \
+          -e GITHUB_ACTIONS=true \
+          -e CI=true \
+          -e SKIP_REQUIREMENTS_INSTALL=1 \
+          -e PRINT_TEST_SELECTION=0 \
+          -e PRINT_TEST_PLAN_ONLY=0 \
+          -e PRINT_SYSTEM_INFO=0 \
+          -e RERUN_FAILURES=1 \
+          -e GITHUB_ACTIONS_PIPELINE=1 \
+          -e SKIP_INITIAL_ONEDIR_FAILURES=1 \
+          -e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \
+          -e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \
+          -e COVERAGE_CONTEXT=${{ matrix.slug }} \
+          -e COLUMNS=190 \
+          -e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \
+          -e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \
+          -e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \
+          -e PIP_DISABLE_PIP_VERSION_CHECK="1" \
+          -e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \
+          -e SALT_TRANSPORT=${{ matrix.transport }} \
+          -e LANG="en_US.UTF-8" \
+          -e SHELL=/bin/bash \
+          -v "/home/runner/work":"/__w" \
+          -v "/tmp/":"/var/lib/docker" \
+          --entrypoint "/usr/lib/systemd/systemd" \
+          ${{ matrix.container }} \
+          --systemd --unit rescue.target
+
+      - name: "Start container ${{ matrix.container }}"
+        run: |
+          /usr/bin/docker start ${{ github.run_id }}_salt-test
+
+      - name: "Show container inspect ${{ matrix.container }}"
+        run: |
+          /usr/bin/docker inspect ${{ github.run_id }}_salt-test
+
+      - name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - name: Decompress .nox Directory
+        run: |
+          docker exec ${{ github.run_id}}_salt-test python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }}
+
+      - name: Download testrun-changed-files.txt
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        uses: actions/download-artifact@v4
+        with:
+          name: testrun-changed-files.txt
+
+      - name: Current Directory
+        run: |
+          pwd
+
+      - name: Show System Info
+        run: |
+          docker exec -e SKIP_REQUIREMENTS_INSTALL=1 -e PRINT_SYSTEM_INFO_ONLY=1 ${{ github.run_id}}_salt-test \
+          python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
+
+      - name: Free Space on Host
+        shell: bash
+        run: |
+          df -h
+
+      - name: Show container network info
+        shell: bash
+        run: |
+          docker exec ${{ github.run_id}}_salt-test ip addr
+
+      - name: Show container processes
+        shell: bash
+        run: |
+          docker exec ${{ github.run_id}}_salt-test ps auxf
+
+      - name: Free Space on Container
+        shell: bash
+        run: |
+          docker exec ${{ github.run_id}}_salt-test df -h
+
+      - name: Free Memory
+        shell: bash
+        run: |
+          free -h
+
+      - name: Configure apparmor
+        run: |
+          # Apparmor's unix-chkpwd profile gets in the way of tests needing to
+          # authenticate from inside a container.
+          cat <<'EOF' | sudo tee /etc/apparmor.d/unix-chkpwd
+          abi <abi/4.0>,
+          include <tunables/global>
+          profile unix-chkpwd /{,usr/}{,s}bin/unix_chkpwd flags=(unconfined) {
+            include <abstractions/base>
+            include <abstractions/nameservice>
+            # To write records to the kernel auditing log.
+            capability audit_write,
+            network netlink raw,
+            /{,usr/}{,s}bin/unix_chkpwd mr,
+            /etc/shadow r,
+            # systemd userdb, used in nspawn
+            /run/host/userdb/*.user r,
+            /run/host/userdb/*.user-privileged r,
+            # file_inherit
+            owner /dev/tty[0-9]* rw,
+            include if exists <local/unix-chkpwd>
+          }
+          EOF
+          sudo systemctl restart apparmor
+          sudo aa-status
+
+      - name: Run Changed Tests
+        id: run-fast-changed-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --core-tests --slow-tests --suppress-no-test-exit-code --from-filenames=testrun-changed-files.txt
+
+      - name: Run Fast Tests
+        id: run-fast-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code
+
+      - name: Run Slow Tests
+        id: run-slow-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code --no-fast-tests --slow-tests
+
+      - name: Run Core Tests
+        id: run-core-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code --no-fast-tests --core-tests
+
+      - name: Run Flaky Tests
+        id: run-flaky-tests
+        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --suppress-no-test-exit-code --no-fast-tests --flaky-jail
+
+      - name: Run Full Tests
+        id: run-full-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
+        run: |
+          docker exec ${{ github.run_id}}_salt-test \
+            python3 -m nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --slow-tests --core-tests --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
+
+      - name: Stop Container
+        run: |
+          docker container stop ${{ github.run_id}}_salt-test
+
+      - name: Remove Container
+        run: |
+          docker container rm ${{ github.run_id}}_salt-test
+
+      - name: Remove Container Image
+        run: |
+          docker image rm ${{ matrix.container }}
+
+      - name: Fix file ownership
+        run: |
+          sudo chown -R "$(id -un)" .
+
+      - name: Combine Coverage Reports
+        if: always() && inputs.skip-code-coverage == false
+        run: |
+          nox --force-color -e combine-coverage
+
+      - name: Prepare Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always()
+        run: |
+          # Delete the salt onedir; we won't need it anymore, and removing it
+          # keeps it from showing up in the tree listing below.
+          sudo rm -rf artifacts/salt*
+          tree -a artifacts
+          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
+            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
+          fi
+
+      - name: Upload Code Coverage Test Run Artifacts
+        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/coverage/
+          include-hidden-files: true
+
+      - name: Upload JUnit XML Test Run Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-junit-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/xml-unittests-output/
+          include-hidden-files: true
+
+      - name: Upload Test Run Log Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
+  test-macos:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
+    runs-on: ${{ matrix.runner }}
+    # Full test runs: each chunk should never take more than 2 hours.
+    # Partial test runs (no chunk parallelization): 6 hours.
+    if: toJSON(fromJSON(inputs.matrix)['macos']) != '[]'
+    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['macos'] }}
+    steps:
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Setup Salt Version
+        run: |
+          echo "${{ inputs.salt-version }}" > salt/_version.txt
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: Install System Dependencies
+        run: |
+          brew install tree
+
+      - name: Download nox.macos.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - name: Decompress .nox Directory
+        run: |
+          nox --force-color -e decompress-dependencies -- macos ${{ matrix.arch }}
+
+      - name: Download testrun-changed-files.txt
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        uses: actions/download-artifact@v4
+        with:
+          name: testrun-changed-files.txt
+
+      - name: Show System Info
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_SYSTEM_INFO_ONLY: "1"
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
+
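+      # The -k "mac or darwin" option below is a pytest keyword filter that
+      # limits collection to macOS-relevant tests.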
+      - name: Run Changed Tests
+        id: run-fast-changed-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            -k "mac or darwin" --core-tests --slow-tests --suppress-no-test-exit-code \
+            --from-filenames=testrun-changed-files.txt
+
+      - name: Run Fast Tests
+        id: run-fast-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            -k "mac or darwin" --suppress-no-test-exit-code
+
+      - name: Run Slow Tests
+        id: run-slow-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests
+
+      - name: Run Core Tests
+        id: run-core-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests
+
+      - name: Run Flaky Tests
+        id: run-flaky-tests
+        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
+
+      - name: Run Full Tests
+        id: run-full-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
+            --slow-tests --core-tests -k "mac or darwin" --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
+
+
+      - name: Fix file ownership
+        run: |
+          sudo chown -R "$(id -un)" .
+
+      - name: Combine Coverage Reports
+        if: always() && inputs.skip-code-coverage == false
+        run: |
+          nox --force-color -e combine-coverage
+
+      - name: Prepare Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always()
+        run: |
+          # Delete the salt onedir; we won't need it anymore, and removing it
+          # keeps it from showing up in the tree listing below.
+          rm -rf artifacts/salt*
+          tree -a artifacts
+          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
+            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
+          fi
+
+      - name: Upload Code Coverage Test Run Artifacts
+        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/coverage/
+          include-hidden-files: true
+
+      - name: Upload JUnit XML Test Run Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-junit-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/xml-unittests-output/
+          include-hidden-files: true
+
+      - name: Upload Test Run Log Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
+  test-windows:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
+
+    if: toJSON(fromJSON(inputs.matrix)['windows']) != '[]'
+    runs-on: ${{ matrix.slug }}
+    # Full test runs: each chunk should never take more than 2 hours.
+    # Partial test runs (no chunk parallelization): 6 hours.
+    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['windows'] }}
+    steps:
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Setup Salt Version
+        run: |
+          echo "${{ inputs.salt-version }}" > salt/_version.txt
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: Install System Dependencies
+        run: |
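+          # No-op placeholder: presumably no extra system packages are needed
+          # on the Windows runners.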
+          echo true
+
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - name: Download nox.windows.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-windows-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: Decompress .nox Directory
+        run: |
+          nox --force-color -e decompress-dependencies -- windows ${{ matrix.arch }}
+
+      - name: Download testrun-changed-files.txt
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        uses: actions/download-artifact@v4
+        with:
+          name: testrun-changed-files.txt
+
+      - name: Check nox python
+        continue-on-error: true
+        run: |
+          .nox/ci-test-onedir/Scripts/python.exe --version
+
+      - name: Show System Info
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_SYSTEM_INFO_ONLY: "1"
+        run: |
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
+
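+      # The -k "win" option below is a pytest keyword filter that limits
+      # collection to Windows-relevant tests.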
+      - name: Run Changed Tests
+        id: run-fast-changed-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+          TMPDIR: ${{ runner.temp }}
+        shell: powershell
+        run: >
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} --
+          -k "win" --core-tests --slow-tests --suppress-no-test-exit-code
+          "--from-filenames=testrun-changed-files.txt"
+
+      - name: Run Fast Tests
+        id: run-fast-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+          TMPDIR: ${{ runner.temp }}
+        shell: powershell
+        run: >
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} --
+          -k "win" --suppress-no-test-exit-code
+
+      - name: Run Slow Tests
+        id: run-slow-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+          TMPDIR: ${{ runner.temp }}
+        shell: powershell
+        run: >
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} --
+          -k "win" --suppress-no-test-exit-code --no-fast-tests --slow-tests
+
+      - name: Run Core Tests
+        id: run-core-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+          TMPDIR: ${{ runner.temp }}
+        shell: powershell
+        run: >
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} --
+          -k "win" --suppress-no-test-exit-code --no-fast-tests --core-tests
+
+      - name: Run Flaky Tests
+        id: run-flaky-tests
+        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+          TMPDIR: ${{ runner.temp }}
+        shell: powershell
+        run: >
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} --
+          -k "win" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
+
+      - name: Run Full Tests
+        id: run-full-tests
+        if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+          PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+          PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+          PIP_DISABLE_PIP_VERSION_CHECK: "1"
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+          SALT_TRANSPORT: ${{ matrix.transport }}
+          TMPDIR: ${{ runner.temp }}
+        shell: powershell
+        run: >
+          nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} --
+          --slow-tests --core-tests -k "win" --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
+
+
+      - name: Combine Coverage Reports
+        if: always() && inputs.skip-code-coverage == false
+        run: |
+          nox --force-color -e combine-coverage
+
+      - name: Prepare Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always()
+        shell: bash
+        run: |
+          # Delete the salt onedir; we won't need it anymore.
+          rm -rf artifacts/salt*
+          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
+            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
+          fi
+
+      - name: Upload Code Coverage Test Run Artifacts
+        if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/coverage/
+          include-hidden-files: true
+
+      - name: Upload JUnit XML Test Run Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-junit-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/xml-unittests-output/
+          include-hidden-files: true
+
+      - name: Upload Test Run Log Artifacts
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.test-group || 1 }}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
+
+  report:
+    name: Test Reports
+    runs-on: ubuntu-22.04
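+    # NOTE: reporting is currently disabled by the always-false condition below.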
+    if: ${{ false }}
+    needs:
+      - test-linux
+      - test-linux-arm64
+      - test-macos
+      - test-windows
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
+    env:
+      PIP_INDEX_URL: https://pypi.org/simple
+
+    steps:
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
+
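+      # Merge the per-chunk artifacts uploaded by the test jobs into a single
+      # artifact per slug and nox session.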
+      - name: Merge JUnit XML Test Run Artifacts
+        continue-on-error: true
+        uses: actions/upload-artifact/merge@v4
+        with:
+          name: testrun-junit-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}
+          pattern: testrun-junit-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-*
+          separate-directories: false
+          delete-merged: true
+
+      - name: Merge Log Test Run Artifacts
+        continue-on-error: true
+        uses: actions/upload-artifact/merge@v4
+        with:
+          name: testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}
+          pattern: testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-*
+          separate-directories: false
+          delete-merged: true
+
+      - name: Merge Code Coverage Test Run Artifacts
+        if: ${{ inputs.skip-code-coverage == false }}
+        continue-on-error: true
+        uses: actions/upload-artifact/merge@v4
+        with:
+          name: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}
+          pattern: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-*
+          separate-directories: false
+          delete-merged: true
+
+      - name: Download Code Coverage Test Run Artifacts
+        uses: actions/download-artifact@v4
+        if: ${{ inputs.skip-code-coverage == false }}
+        id: download-coverage-artifacts
+        with:
+          path: artifacts/coverage/
+          pattern: testrun-coverage-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}*
+          merge-multiple: true
+
+      - name: Show Downloaded Test Run Artifacts
+        if: ${{ inputs.skip-code-coverage == false }}
+        run: |
+          tree -a artifacts
+
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+
+      - name: Create XML Coverage Reports
+        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
+        run: |
+          nox --force-color -e create-xml-coverage-reports
+          mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ matrix.slug }}..${{ inputs.nox-session }}.xml
+          mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ matrix.slug }}..${{ inputs.nox-session }}.xml
+
+      - name: Report Salt Code Coverage
+        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
+        continue-on-error: true
+        run: |
+          nox --force-color -e report-coverage -- salt
+
+      - name: Report Combined Code Coverage
+        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
+        continue-on-error: true
+        run: |
+          nox --force-color -e report-coverage
+
+      - name: Rename Code Coverage DB
+        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
+        continue-on-error: true
+        run: |
+          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}
+
+      - name: Upload Code Coverage DB
+        if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
+        uses: actions/upload-artifact@v4
+        with:
+          name: all-testrun-coverage-artifacts-${{ matrix.slug }}.${{ inputs.nox-session }}
+          path: artifacts/coverage
+          include-hidden-files: true
diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml
index 22e3e58bcfb..bd2453acff5 100644
--- a/.github/workflows/test-package-downloads-action.yml
+++ b/.github/workflows/test-package-downloads-action.yml
@@ -88,9 +88,7 @@ jobs:
     needs:
       - generate-matrix
     runs-on:
-      - self-hosted
-      - linux
-      - bastion
+      - ubuntu-latest
     env:
       USE_S3_CACHE: 'true'
     environment: ${{ inputs.environment }}
@@ -287,7 +285,7 @@ jobs:
         with:
           name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}-${{ matrix.pkg-type }}
           path: |
-            artifacts
+            artifacts/
             !artifacts/salt/*
             !artifacts/salt-*.tar.*
 
@@ -485,7 +483,7 @@ jobs:
         with:
           name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}-${{ matrix.pkg-type }}
           path: |
-            artifacts
+            artifacts/
             !artifacts/salt/*
             !artifacts/salt-*.tar.*
 
@@ -497,9 +495,7 @@ jobs:
     env:
       USE_S3_CACHE: 'true'
     runs-on:
-      - self-hosted
-      - linux
-      - bastion
+      - ubuntu-latest
     environment: ${{ inputs.environment }}
     timeout-minutes: 120  # 2 Hours - More than this and something is wrong
     strategy:
@@ -688,6 +684,6 @@ jobs:
         with:
           name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}-${{ matrix.pkg-type }}
           path: |
-            artifacts
+            artifacts/
             !artifacts/salt/*
             !artifacts/salt-*.tar.*
diff --git a/.github/workflows/test-packages-action-linux.yml b/.github/workflows/test-packages-action-linux.yml
deleted file mode 100644
index 432b8e04bb4..00000000000
--- a/.github/workflows/test-packages-action-linux.yml
+++ /dev/null
@@ -1,274 +0,0 @@
-name: Test Artifact
-
-on:
-  workflow_call:
-    inputs:
-      distro-slug:
-        required: true
-        type: string
-        description: The OS slug to run tests against
-      platform:
-        required: true
-        type: string
-        description: The platform being tested
-      arch:
-        required: true
-        type: string
-        description: The platform arch being tested
-      pkg-type:
-        required: true
-        type: string
-        description: The platform arch being tested
-      salt-version:
-        type: string
-        required: true
-        description: The Salt version of the packages to install and test
-      cache-prefix:
-        required: true
-        type: string
-        description: Seed used to invalidate caches
-      testing-releases:
-        required: true
-        type: string
-        description: A JSON list of releases to test upgrades against
-      nox-version:
-        required: true
-        type: string
-        description: The nox version to install
-      python-version:
-        required: false
-        type: string
-        description: The python version to run tests with
-        default: "3.10"
-      fips:
-        required: false
-        type: boolean
-        default: false
-        description: Test run with FIPS enabled
-      package-name:
-        required: false
-        type: string
-        description: The onedir package name to use
-        default: salt
-      nox-session:
-        required: false
-        type: string
-        description: The nox session to run
-        default: ci-test-onedir
-      skip-code-coverage:
-        required: false
-        type: boolean
-        description: Skip code coverage
-        default: false
-
-env:
-  COLUMNS: 190
-  AWS_MAX_ATTEMPTS: "10"
-  AWS_RETRY_MODE: "adaptive"
-  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
-  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
-  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
-  USE_S3_CACHE: 'true'
-
-jobs:
-
-  generate-matrix:
-    name: Generate Matrix
-    runs-on:
-      # We need to run on our self-hosted runners because we need proper credentials
-      # for boto3 to scan through our repositories.
-      - self-hosted
-      - linux
-      - x86_64
-    outputs:
-      pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
-      build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Generate Package Test Matrix
-        id: generate-pkg-matrix
-        run: |
-          tools ci pkg-matrix ${{ inputs.distro-slug }} \
-            ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
-
-
-  test:
-    name: Test
-    runs-on:
-      - self-hosted
-      - linux
-      - bastion
-    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
-    needs:
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
-
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: "Set `TIMESTAMP` environment variable"
-        shell: bash
-        run: |
-          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Download Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
-          path: artifacts/pkg/
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Decompress Onedir Tarball
-        shell: bash
-        run: |
-          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
-          cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-
-      - name: List Packages
-        run: |
-          tree artifacts/pkg/
-
-      - name: Download nox.linux.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
-        uses: actions/download-artifact@v4
-        with:
-          name: nox-linux-${{ inputs.arch }}-${{ inputs.nox-session }}
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Start VM
-        id: spin-up-vm
-        run: |
-          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
-
-      - name: List Free Space
-        run: |
-          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
-
-      - name: Upload Checkout To VM
-        run: |
-          tools --timestamps vm rsync ${{ inputs.distro-slug }}
-
-      - name: Decompress .nox Directory
-        run: |
-          tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
-
-      - name: Downgrade importlib-metadata
-        if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.tests-chunk) }}
-        run: |
-          # This step can go away once we stop testing classic packages upgrade/downgrades to/from 3005.x
-          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- "sudo python3 -m pip install -U 'importlib-metadata<=4.13.0' 'virtualenv<=20.21.1'"
-
-      - name: Show System Info
-        run: |
-          tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
-            --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }}
-
-      - name: Run Package Tests
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ inputs.fips && '--fips ' || '' }}\
-          --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \
-          ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
-
-      - name: Download Test Run Artifacts
-        id: download-artifacts-from-vm
-        if: always() && steps.spin-up-vm.outcome == 'success'
-        run: |
-          tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
-          # Delete the salt onedir, we won't need it anymore and it will prevent
-          # from it showing in the tree command below
-          rm -rf artifacts/salt*
-          tree -a artifacts
-
-      - name: Destroy VM
-        if: always()
-        run: |
-          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
-
-      - name: Upload Test Run Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts
-            !artifacts/pkg/*
-            !artifacts/salt/*
-            !artifacts/salt-*.tar.*
-
-  report:
-    name: Report
-    runs-on: ubuntu-latest
-    if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-    needs:
-      - generate-matrix
-      - test
-
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Merge Test Run Artifacts
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}
-          pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}-*
-          separate-directories: true
-          delete-merged: true
-
-      - name: Download Test Run Artifacts
-        id: download-test-run-artifacts
-        uses: actions/download-artifact@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}
-          path: artifacts
-
-      - name: Show Test Run Artifacts
-        if: always() && steps.download-test-run-artifacts.outcome == 'success'
-        run: |
-          tree -a artifacts
diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml
deleted file mode 100644
index 5e8c3069178..00000000000
--- a/.github/workflows/test-packages-action-macos.yml
+++ /dev/null
@@ -1,270 +0,0 @@
-name: Test Artifact
-
-on:
-  workflow_call:
-    inputs:
-      distro-slug:
-        required: true
-        type: string
-        description: The OS slug to run tests against
-      runner:
-        required: true
-        type: string
-        description: The GitHub runner name
-      platform:
-        required: true
-        type: string
-        description: The platform being tested
-      arch:
-        required: true
-        type: string
-        description: The platform arch being tested
-      pkg-type:
-        required: true
-        type: string
-        description: The platform arch being tested
-      salt-version:
-        type: string
-        required: true
-        description: The Salt version of the packages to install and test
-      cache-prefix:
-        required: true
-        type: string
-        description: Seed used to invalidate caches
-      testing-releases:
-        required: true
-        type: string
-        description: A JSON list of releases to test upgrades against
-      nox-version:
-        required: true
-        type: string
-        description: The nox version to install
-      python-version:
-        required: false
-        type: string
-        description: The python version to run tests with
-        default: "3.10"
-      package-name:
-        required: false
-        type: string
-        description: The onedir package name to use
-        default: salt
-      nox-session:
-        required: false
-        type: string
-        description: The nox session to run
-        default: ci-test-onedir
-      skip-code-coverage:
-        required: false
-        type: boolean
-        description: Skip code coverage
-        default: false
-
-env:
-  COLUMNS: 190
-  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
-  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
-  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
-
-jobs:
-
-  generate-matrix:
-    name: Generate Matrix
-    runs-on:
-      # We need to run on our self-hosted runners because we need proper credentials
-      # for boto3 to scan through our repositories.
-      - self-hosted
-      - linux
-      - x86_64
-    outputs:
-      pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
-      build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Generate Package Test Matrix
-        id: generate-pkg-matrix
-        run: |
-          tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }}  --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
-
-
-  test:
-    name: Test
-    runs-on: ${{ inputs.runner }}
-    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
-    needs:
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
-
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: "Set `TIMESTAMP` environment variable"
-        shell: bash
-        run: |
-          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Download Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
-          path: artifacts/pkg/
-
-      - name: Install System Dependencies
-        run: |
-          brew install tree
-
-      - name: List Packages
-        run: |
-          tree artifacts/pkg/
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Decompress Onedir Tarball
-        shell: bash
-        run: |
-          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
-          cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-
-      - name: Set up Python ${{ inputs.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.python-version }}"
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ inputs.nox-version }}'
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
-
-      - name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
-        uses: actions/download-artifact@v4
-        with:
-          name: nox-macos-${{ inputs.arch }}-${{ inputs.nox-session }}
-
-      - name: Decompress .nox Directory
-        run: |
-          nox --force-color -e decompress-dependencies -- macos ${{ inputs.arch }}
-
-      - name: Show System Info
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_SYSTEM_INFO_ONLY: "1"
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
-
-      - name: Run Package Tests
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
-        run: |
-          sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
-          ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
-
-      - name: Fix file ownership
-        run: |
-          sudo chown -R "$(id -un)" .
-
-      - name: Prepare Test Run Artifacts
-        id: download-artifacts-from-vm
-        if: always()
-        run: |
-          # Delete the salt onedir, we won't need it anymore and it will prevent
-          # from it showing in the tree command below
-          rm -rf artifacts/salt*
-          tree -a artifacts
-
-      - name: Upload Test Run Artifacts
-        if: always()
-        uses: actions/upload-artifact@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts
-            !artifacts/pkg/*
-            !artifacts/salt/*
-            !artifacts/salt-*.tar.*
-
-  report:
-    name: Report
-    runs-on: ubuntu-latest
-    if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-    needs:
-      - generate-matrix
-      - test
-
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Merge Test Run Artifacts
-        continue-on-error: true
-        uses: actions/upload-artifact/merge@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
-          pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-*
-          separate-directories: true
-          delete-merged: true
-
-      - name: Download Test Run Artifacts
-        id: download-test-run-artifacts
-        uses: actions/download-artifact@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
-          path: artifacts
-
-      - name: Show Test Run Artifacts
-        if: always() && steps.download-test-run-artifacts.outcome == 'success'
-        run: |
-          tree -a artifacts
-
-      - name: Set up Python ${{ inputs.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.python-version }}"
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ inputs.nox-version }}'
-        env:
-          PIP_INDEX_URL: https://pypi.org/simple
diff --git a/.github/workflows/test-packages-action-windows.yml b/.github/workflows/test-packages-action-windows.yml
deleted file mode 100644
index c21100f4e69..00000000000
--- a/.github/workflows/test-packages-action-windows.yml
+++ /dev/null
@@ -1,273 +0,0 @@
-name: Test Artifact
-
-on:
-  workflow_call:
-    inputs:
-      distro-slug:
-        required: true
-        type: string
-        description: The OS slug to run tests against
-      platform:
-        required: true
-        type: string
-        description: The platform being tested
-      arch:
-        required: true
-        type: string
-        description: The platform arch being tested
-      pkg-type:
-        required: true
-        type: string
-        description: The platform arch being tested
-      salt-version:
-        type: string
-        required: true
-        description: The Salt version of the packages to install and test
-      cache-prefix:
-        required: true
-        type: string
-        description: Seed used to invalidate caches
-      testing-releases:
-        required: true
-        type: string
-        description: A JSON list of releases to test upgrades against
-      nox-version:
-        required: true
-        type: string
-        description: The nox version to install
-      python-version:
-        required: false
-        type: string
-        description: The python version to run tests with
-        default: "3.10"
-      fips:
-        required: false
-        type: boolean
-        default: false
-        description: Test run with FIPS enabled
-      package-name:
-        required: false
-        type: string
-        description: The onedir package name to use
-        default: salt
-      nox-session:
-        required: false
-        type: string
-        description: The nox session to run
-        default: ci-test-onedir
-      skip-code-coverage:
-        required: false
-        type: boolean
-        description: Skip code coverage
-        default: false
-
-env:
-  COLUMNS: 190
-  AWS_MAX_ATTEMPTS: "10"
-  AWS_RETRY_MODE: "adaptive"
-  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
-  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
-  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
-  PIP_DISABLE_PIP_VERSION_CHECK: "1"
-  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
-
-jobs:
-
-  generate-matrix:
-    name: Generate Matrix
-    runs-on:
-      # We need to run on our self-hosted runners because we need proper credentials
-      # for boto3 to scan through our repositories.
-      - self-hosted
-      - linux
-      - x86_64
-    outputs:
-      pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
-      build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Generate Package Test Matrix
-        id: generate-pkg-matrix
-        run: |
-          tools ci pkg-matrix ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
-            ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
-
-
-  test:
-    name: Test
-    runs-on:
-      - self-hosted
-      - linux
-      - bastion
-    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
-    needs:
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
-
-    steps:
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: "Set `TIMESTAMP` environment variable"
-        shell: bash
-        run: |
-          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
-
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: Download Packages
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
-          path: artifacts/pkg/
-
-      - name: Download Onedir Tarball as an Artifact
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Decompress Onedir Tarball
-        shell: bash
-        run: |
-          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
-          cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
-
-      - name: List Packages
-        run: |
-          tree artifacts/pkg/
-
-      - name: Download nox.windows.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
-        uses: actions/download-artifact@v4
-        with:
-          name: nox-windows-${{ inputs.arch }}-${{ inputs.nox-session }}
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-        with:
-          cache-prefix: ${{ inputs.cache-prefix }}
-
-      - name: Get Salt Project GitHub Actions Bot Environment
-        run: |
-          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
-          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
-          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
-
-      - name: Start VM
-        id: spin-up-vm
-        run: |
-          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
-
-      - name: List Free Space
-        run: |
-          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
-
-      - name: Upload Checkout To VM
-        run: |
-          tools --timestamps vm rsync ${{ inputs.distro-slug }}
-
-      - name: Decompress .nox Directory
-        run: |
-          tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
-
-      - name: Downgrade importlib-metadata
-        if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.tests-chunk) }}
-        run: |
-          # This step can go away once we stop testing classic packages upgrade/downgrades to/from 3005.x
-          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- "sudo python3 -m pip install -U 'importlib-metadata<=4.13.0' 'virtualenv<=20.21.1'"
-
-      - name: Show System Info
-        run: |
-          tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
-            --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }}
-
-      - name: Run Package Tests
-        run: |
-          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ matrix.fips && '--fips ' || '' }}\
-          --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \
-          ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
-
-      - name: Download Test Run Artifacts
-        id: download-artifacts-from-vm
-        if: always() && steps.spin-up-vm.outcome == 'success'
-        run: |
-          tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
-          # Delete the salt onedir, we won't need it anymore and it will prevent
-          # from it showing in the tree command below
-          rm -rf artifacts/salt*
-          tree -a artifacts
-
-      - name: Destroy VM
-        if: always()
-        run: |
-          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
-
-      - name: Upload Test Run Artifacts
-        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
-        uses: actions/upload-artifact@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
-          path: |
-            artifacts
-            !artifacts/pkg/*
-            !artifacts/salt/*
-            !artifacts/salt-*.tar.*
-
-  report:
-    name: Report
-    runs-on: ubuntu-latest
-    if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
-    needs:
-      - generate-matrix
-      - test
-
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v4
-
-      - name: "Throttle Builds"
-        shell: bash
-        run: |
-          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
-
-      - name: Merge Test Run Artifacts
-        uses: actions/upload-artifact/merge@v4
-        continue-on-error: true
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
-          pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-*
-          separate-directories: true
-          delete-merged: true
-
-      - name: Download Test Run Artifacts
-        id: download-test-run-artifacts
-        uses: actions/download-artifact@v4
-        with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
-          path: artifacts
-
-      - name: Show Test Run Artifacts
-        if: always() && steps.download-test-run-artifacts.outcome == 'success'
-        run: |
-          tree -a artifacts
diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml
new file mode 100644
index 00000000000..76cad098ca2
--- /dev/null
+++ b/.github/workflows/test-packages-action.yml
@@ -0,0 +1,516 @@
+---
+name: Test Packages
+
+on:
+  workflow_call:
+    inputs:
+      salt-version:
+        type: string
+        required: true
+        description: The Salt version of the packages to install and test
+      cache-prefix:
+        required: true
+        type: string
+        description: Seed used to invalidate caches
+      testing-releases:
+        required: true
+        type: string
+        description: A JSON list of releases to test upgrades against
+      nox-version:
+        required: true
+        type: string
+        description: The nox version to install
+      python-version:
+        required: false
+        type: string
+        description: The python version to run tests with
+        default: "3.10"
+      nox-session:
+        required: false
+        type: string
+        description: The nox session to run
+        default: ci-test-onedir
+      skip-code-coverage:
+        required: false
+        type: boolean
+        description: Skip code coverage
+        default: false
+      package-name:
+        required: false
+        type: string
+        description: The onedir package name to use
+        default: salt
+      matrix:
+        required: true
+        type: string
+        description: JSON job matrix config
+      linux_arm_runner:
+        required: true
+        type: string
+        description: The runner label to use for Linux arm64 jobs
+
+env:
+  COLUMNS: 190
+  AWS_MAX_ATTEMPTS: "10"
+  AWS_RETRY_MODE: "adaptive"
+  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
+  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
+  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
+  PIP_DISABLE_PIP_VERSION_CHECK: "1"
+  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
+  USE_S3_CACHE: 'false'
+
+jobs:
+
+  test-linux:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
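+    # x86_64 targets run on the hosted ubuntu-24.04 runner; arm64 targets use the
+    # runner label passed in through the linux_arm_runner input.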
+    runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
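+    # Skip the job entirely when the generated matrix contains no Linux entries.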
+    if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
+    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
+    steps:
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
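+          # Short random sleep to stagger parallel matrix jobs and ease contention on shared services.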
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
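+          # TIMESTAMP is appended to the artifact names below to keep uploads unique.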
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Download Packages
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ matrix.arch }}-${{ matrix.pkg_type }}
+          path: artifacts/pkg/
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - name: List Packages
+        run: |
+          tree artifacts/pkg/
+
+      - name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: "Ensure docker is running"
+        run: |
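+          # Best-effort start; the "|| exit 0" keeps this step green if containerd is already running or unavailable.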
+          sudo systemctl start containerd || exit 0
+
+      - name: "Pull container ${{ matrix.container }}"
+        run: |
+          docker pull ${{ matrix.container }}
+
+      - name: "Create container ${{ matrix.container }}"
+        run: |
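+          # Create (but do not yet start) a privileged systemd container that mirrors
+          # the GitHub runner's workspace, tool cache and docker socket mounts, so the
+          # package tests execute inside the target distro image.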
+          /usr/bin/docker create --name ${{ github.run_id }}_salt-test-pkg --workdir /__w/salt/salt --privileged -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true -v "/var/run/docker.sock":"/var/run/docker.sock" -v "/home/runner/work":"/__w" -v "/home/runner/work/_temp":"/__w/_temp" -v "/home/runner/work/_actions":"/__w/_actions" -v "/opt/hostedtoolcache":"/__t" -v "/home/runner/work/_temp/_github_home":"/github/home" -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" --entrypoint "/usr/lib/systemd/systemd" ${{ matrix.container }} --systemd --unit rescue.target
+
+      - name: "Start container ${{ matrix.container }}"
+        run: |
+          /usr/bin/docker start ${{ github.run_id }}_salt-test-pkg
+
+      - name: Decompress .nox Directory
+        run: |
+          docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }}
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+        with:
+          cache-prefix: ${{ inputs.cache-prefix }}
+
+      - name: List Free Space
+        run: |
+          df -h || true
+
+      - name: Show System Info
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_SYSTEM_INFO_ONLY: "1"
+        run: |
+          docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
+
+      - name: Run Package Tests
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+        run: |
+          /usr/bin/docker exec ${{ github.run_id }}_salt-test-pkg \
+          python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
+          ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
+
+      - name: Upload Test Run Log Artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
+      - name: Upload Test Run Artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/
+            !artifacts/pkg/*
+            !artifacts/salt/*
+            !artifacts/salt-*.tar.*
+          include-hidden-files: true
+
+  test-macos:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
+    runs-on: ${{ matrix.runner }}
+    if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
+    timeout-minutes: 150  # 2 & 1/2 Hours - More than this and something is wrong (macOS needs a little more time)
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['macos'] }}
+    steps:
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Download Packages
+        uses: actions/download-artifact@v4
+        with:
+          name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos
+          path: artifacts/pkg/
+
+      - name: Install System Dependencies
+        run: |
+          brew install tree
+
+      - name: List Packages
+        run: |
+          tree artifacts/pkg/
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - name: Download nox.macos.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: Decompress .nox Directory
+        run: |
+          nox --force-color -e decompress-dependencies -- macos ${{ matrix.arch }}
+
+      - name: Show System Info
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_SYSTEM_INFO_ONLY: "1"
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
+
+      - name: Run Package Tests
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+        run: |
+          sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
+          ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
+
+      - name: Fix file ownership
+        run: |
+          sudo chown -R "$(id -un)" .
+
+      - name: Prepare Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always()
+        run: |
+          # Delete the salt onedir, we won't need it anymore and it will prevent
+          # from it showing in the tree command below
+          rm -rf artifacts/salt*
+          tree -a artifacts
+
+      - name: Upload Test Run Log Artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
+      - name: Upload Test Run Artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pkg-testrun-artifacts-${{ matrix.slug }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/
+            !artifacts/pkg/*
+            !artifacts/salt/*
+            !artifacts/salt-*.tar.*
+          include-hidden-files: true
+
+
+  test-windows:
+    name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
+    runs-on: ${{ matrix.slug }}
+    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
+    if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['windows'] }}
+    steps:
+
+      - name: Set up Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: "Set `TIMESTAMP` environment variable"
+        shell: bash
+        run: |
+          echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
+
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: Download Packages
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ matrix.arch }}-${{ matrix.pkg_type }}
+          path: ./artifacts/pkg/
+
+      - name: Download Onedir Tarball as an Artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: ./artifacts/
+
+      - name: Decompress Onedir Tarball
+        shell: bash
+        run: |
+          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
+          cd artifacts
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+
+      - name: Install Nox
+        run: |
+          python3 -m pip install 'nox==${{ inputs.nox-version }}'
+        env:
+          PIP_INDEX_URL: https://pypi.org/simple
+
+      - run: python3 --version
+
+      - name: Download nox.windows.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
+        uses: actions/download-artifact@v4
+        with:
+          name: nox-windows-${{ matrix.arch }}-${{ inputs.nox-session }}
+
+      - name: Decompress .nox Directory
+        run: |
+          nox --force-color -e decompress-dependencies -- windows ${{ matrix.arch }}
+
+      - name: List Important Directories
+        run: |
+          dir d:/
+          dir .
+          dir artifacts/
+          dir artifacts/pkg
+          dir .nox/ci-test-onedir/Scripts
+
+      - name: Check onedir python
+        continue-on-error: true
+        run: |
+          artifacts/salt/Scripts/python.exe --version
+
+      - name: Check nox python
+        continue-on-error: true
+        run: |
+          .nox/ci-test-onedir/Scripts/python.exe --version
+
+      - name: Show System Info
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          SKIP_CODE_COVERAGE: "1"
+          PRINT_SYSTEM_INFO_ONLY: "1"
+          PYTHONUTF8: "1"
+        run: |
+          nox --force-color -f noxfile.py -e "${{ inputs.nox-session }}-pkgs" -- '${{ matrix.tests-chunk }}' --log-cli-level=debug
+
+      - name: Run Package Tests
+        env:
+          SKIP_REQUIREMENTS_INSTALL: "1"
+          PRINT_TEST_SELECTION: "0"
+          PRINT_TEST_PLAN_ONLY: "0"
+          PRINT_SYSTEM_INFO: "0"
+          RERUN_FAILURES: "1"
+          GITHUB_ACTIONS_PIPELINE: "1"
+          SKIP_INITIAL_ONEDIR_FAILURES: "1"
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          OUTPUT_COLUMNS: "190"
+          PYTHONUTF8: "1"
+        run: >
+          nox --force-color -f noxfile.py -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
+          ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
+
+      - name: Prepare Test Run Artifacts
+        id: download-artifacts-from-vm
+        if: always()
+        shell: bash
+        run: |
+          # Delete the salt onedir, we won't need it anymore and it will prevent
+          # from it showing in the tree command below
+          rm -rf artifacts/salt*
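+          # Tag the coverage DB with slug, session, transport and chunk so merged artifacts do not collide.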
+          if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
+            mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
+          fi
+
+      - name: Upload Test Run Log Artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/logs
+          include-hidden-files: true
+
+      - name: Upload Test Run Artifacts
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: pkg-testrun-artifacts-${{ matrix.slug }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
+          path: |
+            artifacts/
+            !artifacts/pkg/*
+            !artifacts/salt/*
+            !artifacts/salt-*.tar.*
+          include-hidden-files: true
+
+  report:
+    name: Report
+    runs-on: ubuntu-22.04
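+    # NOTE: this job is currently disabled by the "if: ${{ false }}" condition below.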
+    if: ${{ false }}
+    needs:
+      - test-linux
+      - test-macos
+      - test-windows
+    strategy:
+      matrix:
+        include: ${{ fromJSON(inputs.matrix)['linux'] }}
+
+    steps:
+      - name: Checkout Source Code
+        uses: actions/checkout@v4
+
+      - name: "Throttle Builds"
+        shell: bash
+        run: |
+          t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
+
+      - name: Wait For Artifacts
+        run: |
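+          # Give the artifact service a moment to settle before merging test run artifacts.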
+          sleep 60
+
+      - name: Merge Test Run Artifacts
+        continue-on-error: true
+        uses: actions/upload-artifact/merge@v4
+        with:
+          name: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}
+          pattern: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-*
+          separate-directories: true
+          delete-merged: true
+
+      - name: Wait For Artifacts 2
+        run: |
+          sleep 60
+
+      - name: Download Test Run Artifacts
+        id: download-test-run-artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: artifacts/
+          pattern: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}*
+          merge-multiple: true
+
+      - name: Show Test Run Artifacts
+        if: always()
+        run: |
+          tree -a artifacts
diff --git a/.github/workflows/workflow-finished.yml b/.github/workflows/workflow-finished.yml
index 81d48278b7c..910f7d2e1fa 100644
--- a/.github/workflows/workflow-finished.yml
+++ b/.github/workflows/workflow-finished.yml
@@ -13,7 +13,7 @@ on:
       - 3007.x
       - 3006.x
     types:
-     - completed
+      - completed
 
 permissions:
   contents: read
diff --git a/.gitignore b/.gitignore
index 9f8f0b54d12..e54f1b655ee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -90,6 +90,7 @@ tests/unit/templates/roots
 # Pycharm
 .idea
 venv/
+.venv/
 
 # VS Code
 .vscode
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c742ba10a1d..eb3e9a231e6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -59,7 +59,7 @@ repos:
       - id: tools
         alias: generate-workflows
         name: Generate GitHub Workflow Templates
-        files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/.*)$
+        files: ^(cicd/shared-gh-workflows-context\.yml|tools/utils/__init__.py|tools/precommit/workflows\.py|.github/workflows/.*)$
         pass_filenames: false
         args:
           - pre-commit
diff --git a/AUTHORS b/AUTHORS
index e6d611cf2a9..7d645d49670 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -8,114 +8,28 @@ Whos Who in Salt
 The Man With the Plan
 ----------------------------
 
-Thomas S. Hatch is the main developer of Salt. He is the founder, owner,
-maintainer and lead of the Salt project, as well as author of the majority
-of the Salt code and documentation.
+Thomas S. Hatch is the creator of Salt. He was the founder, owner, and
+maintainer who led the Salt project, as well as the author of the majority
+of the initial Salt code and documentation.
+
+SaltStack, Inc. was acquired by VMware in 2020. In 2023, VMware was
+acquired by Broadcom.
+
+The Salt Project core team of developers is employed by Broadcom.
 
 Documentation System
 ----------------------------
 
-The documentation system was put together by Seth House, much of the
-documentation is being maintained by Seth.
-
-Developers
-----------------------------
-
-Aaron Bull Schaefer <aaron@elasticdog.com>
-Aaron Toponce <aaron.toponce@gmail.com>
-Andrew Hammond <andrew.george.hammond@gmail.com>
-Aditya Kulkarni <adi@saltstack.com>
-Alexander Pyatkin <asp@thexyz.net>
-Andre Sachs <andre@sachs.nom.za>
-Andrew Colin Kissa <andrew@topdog.za.net>
-Andrew Kuhnhausen <trane@errstr.com>
-Antti Kaihola <akaihol+github@ambitone.com>
-archme <archme.mail@gmail.com>
-Brad Barden <brad@mifflinet.net>
-Bret Palsson <bretep@gmail.com>
-Brian Wagner <wags@wagsworld.net>
-C. R. Oldham <cr@saltstack.com>
-Carl Loa Odin <carlodin@gmail.com>
-Carlo Pires <carlopires@gmail.com>
-Chris Rebert <chris.rebert@hulu.com>
-Chris Scheller <schelcj@umich.edu>
-Christer Edwards <christer.edwards@gmail.com>
-Clint Savage <herlo1@gmail.com>
-Colton Myers <cmyers@saltstack.com>
-Corey Quinn <corey@sequestered.net>
-Corin Kochenower <ckochenower@saltstack.com>
-Dan Garthwaite <dan@garthwaite.org>
-Daniel Wallace <danielwallace at gtmanfred dot com>
-David Boucha <boucha@gmail.com>
-David Pravec <alekibango@pravec.tk>
-deutsche
-Dmitry Kuzmenko <dkuzmenko@saltstack.com>
-Doug Renn <renn@nestegg.com>
-Eivind Uggedal <eivind@uggedal.com>
-epoelke@gmail.com <epoelke@heartflow.com>
-Eric Poelke <epoelke@gmail.com>
-Erik Nolte <enolte@beyondoblivion.com>
-Evan Borgstrom <evan@fatbox.ca>
-Forrest Alvarez <forrest.alvarez@gmail.com>
-Fred Reimer <freimer@freimer.org>
-Henrik Holmboe <henrik@holmboe.se>
-Gareth J. Greenaway <gareth@wiked.org>
-Jacob Albretsen <jakea@xmission.com>
-Jed Glazner <jglazner@coldcrow.com>
-Jeff Bauer <jbauer@rubic.com>
-Jeff Hutchins <jhutchins@getjive.com>
-Jeffrey C. Ollie <jeff@ocjtech.us>
-Jeff Schroeder <jeffschroeder@computer.org>
-Johnny Bergström
-Jonas Buckner <buckner.jonas@gmail.com>
-Jonathan Harker <k.jonathan.harker@hp.com>
-Joseph Hall <joseph@saltstack.com>
-Josmar Dias <josmarnet@gmail.com>
-Kent Tenney <ktenney@gmail.com>
-lexual
-Marat Shakirov
-Marc Abramowitz <marc+github@marc-abramowitz.com>
-Martin Schnabel <mb0@mb0.org>
-Mathieu Le Marec - Pasquet <kiorky@cryptelium.net>
-Matt Black
-Matthew Printz <hipokrit@gmail.com>
-Matthias Teege <matthias-git@mteege.de>
-Maxim Burgerhout <maxim@wzzrd.com>
-Mickey Malone <mickey.malone@gmail.com>
-Michael Steed <msteed@saltstack.com>
-Mike Place <mp@saltstack.com>
-Mircea Ulinic <ping@mirceaulinic.net>
-Mitch Anderson <mitch@metauser.net>
-Mostafa Hussein <mostafa.hussein91@gmail.com>
-Nathaniel Whiteinge <seth@eseth.com>
-Nicolas Delaby <nicolas.delaby@ezeep.com>
-Nicole Thomas <nicole@saltstack.com>
-Nigel Owen <nigelowen2.gmail.com>
-Nitin Madhok <nmadhok@g.clemson.edu>
-Oleg Anashkin <oleg.anashkin@gmail.com>
-Pedro Algarvio <pedro@algarvio.me>
-Peter Baumgartner
-Pierre Carrier <pierre@spotify.com>
-Rhys Elsmore <me@rhys.io>
-Rafael Caricio <rafael@caricio.com>
-Robert Fielding
-Sean Channel <pentabular@gmail.com>
-Seth House <seth@eseth.com>
-Seth Vidal <skvidal@fedoraproject.org>
-Stas Alekseev <stas.alekseev@gmail.com>
-Thibault Cohen <titilambert@gmail.com>
-Thomas Schreiber <tom@rizumu.us>
-Thomas S Hatch <thatch45@gmail.com>
-Tor Hveem <xt@bash.no>
-Travis Cline <travis.cline@gmail.com>
-Wieland Hoffmann <themineo+github@gmail.com>
+The initial documentation system was put together by Seth House.
 
+Documentation is now primarily maintained by the Salt Project core team and
+community members.
 
 Growing Community
 --------------------------------
 
-Salt is a rapidly growing project with a large community, to view all
-contributors please check Github, this file can sometimes be out of date:
+Salt is a rapidly growing project with a large community, and has had more than
+2,400 contributors over the years. To view all contributors, please check GitHub:
 
 https://github.com/saltstack/salt/graphs/contributors
 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a5a5912826d..1a64b4f6e2b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,10 +13,23 @@ Versions are `MAJOR.PATCH`.
 ### Removed
 
 - The ``salt.utils.psutil_compat`` was deprecated and now removed in Salt 3008. Please use the ``psutil`` module directly. [#66160](https://github.com/saltstack/salt/issues/66160)
+## 3006.9 (2024-07-29)
+
+
+### Deprecated
+
+- Drop CentOS 7 support [#66623](https://github.com/saltstack/salt/issues/66623)
+- No longer build RPM packages with CentOS Stream 9 [#66624](https://github.com/saltstack/salt/issues/66624)
 
 
 ### Fixed
 
+- Made slsutil.renderer work with salt-ssh [#50196](https://github.com/saltstack/salt/issues/50196)
+- Fixed defaults.merge is not available when using salt-ssh [#51605](https://github.com/saltstack/salt/issues/51605)
+- Fixed config.get does not support merge option with salt-ssh [#56441](https://github.com/saltstack/salt/issues/56441)
+- Update to include croniter in pkg requirements [#57649](https://github.com/saltstack/salt/issues/57649)
+- Fixed state.test does not work with salt-ssh [#61100](https://github.com/saltstack/salt/issues/61100)
+- Made slsutil.findup work with salt-ssh [#61143](https://github.com/saltstack/salt/issues/61143)
 - Fixes multiple issues with the cmd module on Windows. Scripts are called using
   the ``-File`` parameter to the ``powershell.exe`` binary. ``CLIXML`` data in
   stderr is now removed (only applies to encoded commands). Commands can now be
@@ -35,6 +48,38 @@ Versions are `MAJOR.PATCH`.
 - Change log level of successful master cluster key exchange from error to info. [#66266](https://github.com/saltstack/salt/issues/66266)
 - Made `file.managed` skip download of a remote source if the managed file already exists with the correct hash [#66342](https://github.com/saltstack/salt/issues/66342)
 - Fixed nftables.build_rule breaks ipv6 rules by using the wrong syntax for source and destination addresses [#66382](https://github.com/saltstack/salt/issues/66382)
+- file.replace and file.search work properly with /proc files [#63102](https://github.com/saltstack/salt/issues/63102)
+- Fix utf8 handling in 'pass' renderer [#64300](https://github.com/saltstack/salt/issues/64300)
+- Fixed the incorrect "version argument will be ignored for multiple package targets" warning when using the pkgs argument to the yumpkg module. [#64563](https://github.com/saltstack/salt/issues/64563)
+- salt-cloud honors root_dir config setting for log_file location and fixes for root_dir locations on windows. [#64728](https://github.com/saltstack/salt/issues/64728)
+- Fixed slsutil.update with salt-ssh during template rendering [#65067](https://github.com/saltstack/salt/issues/65067)
+- Fix config.items when called on minion [#65251](https://github.com/saltstack/salt/issues/65251)
+- Ensure that on RPM and DEB systems, the user and group for an existing Salt installation are maintained on upgrade [#65264](https://github.com/saltstack/salt/issues/65264)
+- Fix typo in nftables module to ensure unique nft family values [#65295](https://github.com/saltstack/salt/issues/65295)
+- pkg.installed state aggregate does not honor the requires requisite [#65304](https://github.com/saltstack/salt/issues/65304)
+- Added SSH wrapper for logmod [#65630](https://github.com/saltstack/salt/issues/65630)
+- Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM [#65816](https://github.com/saltstack/salt/issues/65816)
+- Corrected x509_v2 CRL creation `last_update` and `next_update` values when system timezone is not UTC [#65837](https://github.com/saltstack/salt/issues/65837)
+- Make sure the root minion process handles SIGUSR1 and emits a traceback like its child processes [#66095](https://github.com/saltstack/salt/issues/66095)
+- Replaced pyvenv with builtin venv for virtualenv_mod [#66132](https://github.com/saltstack/salt/issues/66132)
+- Made `file.managed` skip download of a remote source if the managed file already exists with the correct hash [#66342](https://github.com/saltstack/salt/issues/66342)
+- Fix win_task ExecutionTimeLimit and result/error code interpretation [#66347](https://github.com/saltstack/salt/issues/66347), [#66441](https://github.com/saltstack/salt/issues/66441)
+- Fixed nftables.build_rule breaks ipv6 rules by using the wrong syntax for source and destination addresses [#66382](https://github.com/saltstack/salt/issues/66382)
+- Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key [#66414](https://github.com/saltstack/salt/issues/66414)
+- Fixed parallel state execution with Salt-SSH [#66514](https://github.com/saltstack/salt/issues/66514)
+- Fix support for FIPS approved encryption and signing algorithms. [#66579](https://github.com/saltstack/salt/issues/66579)
+- Fix relative file_roots paths [#66588](https://github.com/saltstack/salt/issues/66588)
+- Fixed an issue with cmd.run with requirements when the shell is not the
+  default [#66596](https://github.com/saltstack/salt/issues/66596)
+- Fix RPM package provides [#66604](https://github.com/saltstack/salt/issues/66604)
+- Upgrade relenv to 0.16.1. This release fixes several package installs for salt-pip [#66632](https://github.com/saltstack/salt/issues/66632)
+- Upgrade relenv to 0.17.0 (https://github.com/saltstack/relenv/blob/v0.17.0/CHANGELOG.md) [#66663](https://github.com/saltstack/salt/issues/66663)
+- Upgrade dependencies due to security issues:
+  - pymysql>=1.1.1
+  - requests>=2.32.0
+  - docker>=7.1.0 [#66666](https://github.com/saltstack/salt/issues/66666)
+- Corrected a missed line in branch 3006.x when backporting from PR 61620 and 65044 [#66683](https://github.com/saltstack/salt/issues/66683)
+- Remove debug output from shell scripts for packaging [#66747](https://github.com/saltstack/salt/issues/66747)
 
 
 ### Added
@@ -44,12 +89,18 @@ Versions are `MAJOR.PATCH`.
   unbootstrap chocolatey. [#64722](https://github.com/saltstack/salt/issues/64722)
 - Add Ubuntu 24.04 support [#66180](https://github.com/saltstack/salt/issues/66180)
 - Add Fedora 40 support, replacing Fedora 39 [#66300](https://github.com/saltstack/salt/issues/66300)
+- Add Ubuntu 24.04 support [#66180](https://github.com/saltstack/salt/issues/66180)
+- Add Fedora 40 support, replacing Fedora 39 [#66300](https://github.com/saltstack/salt/issues/66300)
+- Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9) [#66624](https://github.com/saltstack/salt/issues/66624)
 
 
 ### Security
 
 - Bump to `pydantic==2.6.4` due to https://github.com/advisories/GHSA-mr82-8j83-vxmv [#66433](https://github.com/saltstack/salt/issues/66433)
 - Bump to ``jinja2==3.1.4`` due to https://github.com/advisories/GHSA-h75v-3vvj-5mfj [#66488](https://github.com/saltstack/salt/issues/66488)
+- Bump to ``jinja2==3.1.4`` due to https://github.com/advisories/GHSA-h75v-3vvj-5mfj [#66488](https://github.com/saltstack/salt/issues/66488)
+- CVE-2024-37088: salt-call will fail with exit code 1 if bad pillar data is
+  encountered. [#66702](https://github.com/saltstack/salt/issues/66702)
 
 
 ## 3006.8 (2024-04-29)
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index e7f1c331127..313038982c3 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -60,7 +60,7 @@ representative at an online or offline event.
 
 Instances of abusive, harassing, or otherwise unacceptable behavior may be
 reported to the community leaders responsible for enforcement at
-conduct@saltstack.com.
+saltproject.pdl@broadcom.com.
 All complaints will be reviewed and investigated promptly and fairly.
 
 All community leaders are obligated to respect the privacy and security of the
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 9fc693dfd09..3688c5e0c53 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,30 +1,56 @@
-============
-Contributing
-============
+==============================================
+Contributing to Salt: A Guide for Contributors
+==============================================
 
-So you want to contribute to the Salt project? Excellent! You can help
-in a number of ways:
+So, you want to contribute to the Salt project? That's fantastic! There are many
+ways you can help improve Salt:
 
--  Use Salt and open well-written bug reports.
--  Join a `working group <https://github.com/saltstack/community>`__.
--  Answer questions on `irc <https://web.libera.chat/#salt>`__,
-   the `community Slack <https://via.vmw.com/salt-slack>`__,
-   the `salt-users mailing
-   list <https://groups.google.com/forum/#!forum/salt-users>`__,
-   `Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
-   or `r/saltstack on Reddit <https://www.reddit.com/r/saltstack/>`__.
--  Fix bugs.
--  `Improve the documentation <https://saltstack.gitlab.io/open/docs/docs-hub/topics/contributing.html>`__.
-- Provide workarounds, patches, or other code without tests.
-- Tell other people about problems you solved using Salt.
+- Use Salt and report bugs with clear, detailed descriptions.
+- Join a `working group <https://github.com/saltstack/community>`__ to
+  collaborate with other contributors.
+- Answer questions on platforms like
+  the `community Discord <https://discord.com/invite/J7b7EscrAs>`__,
+  the `salt-users mailing list <https://groups.google.com/forum/#!forum/salt-users>`__,
+  `Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
+  or `r/saltstack on Reddit <https://www.reddit.com/r/saltstack/>`__.
+- Fix bugs or contribute to the `documentation <https://saltstack.gitlab.io/open/docs/docs-hub/topics/contributing.html>`__.
+- Submit workarounds, patches, or code (even without tests).
+- Share your experiences and solutions to problems you've solved using Salt.
 
-If you'd like to update docs or fix an issue, you're going to need the
-Salt repo. The best way to contribute is using
-`Git <https://git-scm.com/>`__.
+Choosing the Right Branch for Your Pull Request
+===============================================
 
+We appreciate your contributions to the project! To ensure a smooth and
+efficient workflow, please follow these guidelines when submitting a Pull
+Request. Each type of contribution—whether it's fixing a bug, adding a feature,
+updating documentation, or fixing tests—should be targeted at the appropriate
+branch. This helps us manage changes effectively and maintain stability across
+versions.
+
+- **Bug Fixes:**
+
+  Create your Pull Request against the oldest supported branch where the bug
+  exists. This ensures that the fix can be applied to all relevant versions
+  (see the sketch after this list).
+
+- **New Features**:
+
+  For new features or enhancements, create your Pull Request against the master
+  branch.
+
+- **Documentation Updates:**
+
+  Documentation changes should be made against the master branch, unless they
+  are related to a bug fix, in which case they should follow the same branch as
+  the bug fix.
+
+- **Test Fixes:**
+
+  Pull Requests that fix broken or failing tests should be created against the
+  oldest supported branch where the issue occurs.
+
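+As a minimal sketch (assuming your fork's remote is named ``origin`` and the
+Salt repository is ``upstream``), a bug fix targeting the oldest supported
+branch might look like:
+
+.. code-block:: bash
+
+    # Start from the oldest supported branch that carries the bug, e.g. 3006.x
+    git fetch upstream
+    git checkout -b my-bug-fix upstream/3006.x
+
+    # Commit the fix, push it to your fork, then open the Pull Request
+    # against the 3006.x branch on GitHub
+    git commit -am "Fix my bug"
+    git push origin my-bug-fix
+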
+Setting Up Your Salt Development Environment
+============================================
 
-Environment setup
-=================
 To hack on Salt or the docs you're going to need to set up your
 development environment. If you already have a workflow that you're
 comfortable with, you can use that, but otherwise this is an opinionated
@@ -109,7 +135,7 @@ Then activate it:
 
 Sweet! Now you're ready to clone Salt so you can start hacking away! If
 you get stuck at any point, check out the resources at the beginning of
-this guide. IRC and Slack are particularly helpful places to go.
+this guide. Discord and GitHub Discussions are particularly helpful places to go.
 
 
 Get the source!
@@ -605,7 +631,7 @@ your PR is submitted during the week you should be able to expect some
 kind of communication within that business day. If your tests are
 passing and we're not in a code freeze, ideally your code will be merged
 that week or month. If you haven't heard from your assigned reviewer, ping them
-on GitHub, `irc <https://web.libera.chat/#salt>`__, or Community Slack.
+on GitHub or `Community Discord <https://discord.com/invite/J7b7EscrAs>`__.
 
 It's likely that your reviewer will leave some comments that need
 addressing - it may be a style change, or you forgot a changelog entry,
diff --git a/README.rst b/README.rst
index dd32e11cc5f..63d4bf77568 100644
--- a/README.rst
+++ b/README.rst
@@ -6,17 +6,9 @@
    :alt: PyPi Package Downloads
    :target: https://pypi.org/project/salt
 
-.. image:: https://img.shields.io/lgtm/grade/python/github/saltstack/salt
-   :alt: PyPi Package Downloads
-   :target: https://lgtm.com/projects/g/saltstack/salt/context:python
-
-.. image:: https://img.shields.io/badge/slack-SaltProject-blue.svg?logo=slack
-   :alt: Salt Project Slack Community
-   :target: https://via.vmw.com/salt-slack
-
-.. image:: https://img.shields.io/twitch/status/saltprojectoss
-   :alt: Salt Project Twitch Channel
-   :target: https://www.twitch.tv/saltprojectoss
+.. image:: https://img.shields.io/badge/discord-SaltProject-blue.svg?logo=discord
+   :alt: Salt Project Discord Community
+   :target: https://discord.com/invite/J7b7EscrAs
 
 .. image:: https://img.shields.io/reddit/subreddit-subscribers/saltstack?style=social
    :alt: Salt Project subreddit
@@ -71,20 +63,21 @@ In addition to configuration management Salt can also:
 
 About our sponsors
 ==================
-Salt powers VMware's `VMware Aria Automation Config`_
-(previously vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found
+
+Salt powers VMware by Broadcom's `Tanzu Salt`_
+(previously Aria Automation Config / vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found
 under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and
 Tieto, to name a few.
 
-The original sponsor of our community, SaltStack, was `acquired by VMware in 2020 <https://www.vmware.com/company/acquisitions/saltstack.html>`_.
-The Salt Project remains an open source ecosystem that VMware supports and
-contributes to. VMware ensures the code integrity and quality of the Salt
+The original sponsor of our community, SaltStack, was acquired by VMware in 2020.
+`VMware was later acquired by Broadcom in 2023 <https://investors.broadcom.com/news-releases/news-release-details/broadcom-completes-acquisition-vmware>`__.
+The Salt Project remains an open source ecosystem that Broadcom supports and
+contributes to. Broadcom ensures the code integrity and quality of the Salt
 modules by acting as the official sponsor and manager of the Salt project. Many
-of the core Salt Project contributors are also VMware employees. This team
+of the core Salt Project contributors are also Broadcom employees. This team
 carefully reviews and enhances the Salt modules to ensure speed, quality, and
 security.
 
-
 Download and install Salt
 =========================
 Salt is tested and packaged to run on CentOS, Debian, RHEL, Ubuntu, MacOS,
@@ -93,9 +86,11 @@ Windows, and more. Download Salt and get started now. See
 for more information.
 
 To download and install Salt, see:
-* `The Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/index.html>`_
-* `Salt Project repository <https://repo.saltproject.io/>`_
 
+* `The Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/index.html>`_
+    * `Salt Project Repository: Linux (RPM) <https://packages.broadcom.com/artifactory/saltproject-rpm>`__ - Where Salt ``rpm`` packages are officially stored and distributed.
+    * `Salt Project Repository: Linux (DEB) <https://packages.broadcom.com/artifactory/saltproject-deb>`__ - Where Salt ``deb`` packages are officially stored and distributed.
+    * `Salt Project Repository: GENERIC <https://packages.broadcom.com/artifactory/saltproject-generic>`__ - Where Salt Windows, macOS, etc. (non-rpm, non-deb) packages are officially stored and distributed.
 
 Technical support
 =================
@@ -103,7 +98,8 @@ Report bugs or problems using Salt by opening an issue: `<https://github.com/sal
 
 To join our community forum where you can exchange ideas, best practices,
 discuss technical support questions, and talk to project maintainers, join our
-Slack workspace: `Salt Project Community Slack`_
+Discord server: `Salt Project Community Discord`_
+
 
 
 Salt Project documentation
@@ -127,7 +123,7 @@ announcements.
 
 Other channels to receive security announcements include the
 `Salt Community mailing list <https://groups.google.com/forum/#!forum/salt-users>`_
-and the `Salt Project Community Slack`_.
+and the `Salt Project Community Discord`_.
 
 
 Responsibly reporting security vulnerabilities
@@ -152,11 +148,9 @@ Please be sure to review our
 `Code of Conduct <https://github.com/saltstack/salt/blob/master/CODE_OF_CONDUCT.md>`_.
 Also, check out some of our community resources including:
 
-* `Salt Project Community Wiki <https://github.com/saltstack/community/wiki>`_
-* `Salt Project Community Slack`_
-* `Salt Project: IRC on LiberaChat <https://web.libera.chat/#salt>`_
+* `Salt Project Community Discord`_
 * `Salt Project YouTube channel <https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg>`_
-* `Salt Project Twitch channel <https://www.twitch.tv/saltprojectoss>`_
+* `Salt Project Community Notes and Wiki <https://github.com/saltstack/community/>`_
 
 There are lots of ways to get involved in our community. Every month, there are
 around a dozen opportunities to meet with other contributors and the Salt Core
@@ -164,10 +158,8 @@ team and collaborate in real time. The best way to keep track is by subscribing
 to the **Salt Project Community Events Calendar** on the main
 `<https://saltproject.io>`_ website.
 
-If you have additional questions, email us at saltproject@vmware.com or reach out
-directly to the Community Manager, Jimmy Chunga via Slack. We'd be glad to
-have you join our community!
-
+If you have additional questions, email us at saltproject.pdl@broadcom.com or reach out
+directly on the Community Discord. We'd be glad to have you join our community!
 
 License
 =======
@@ -180,10 +172,8 @@ used by external modules.
 A complete list of attributions and dependencies can be found here:
 `salt/DEPENDENCIES.md <https://github.com/saltstack/salt/blob/master/DEPENDENCIES.md>`_
 
-.. _Salt Project Community Slack: https://via.vmw.com/salt-slack
-.. _VMware Aria Automation Config: https://www.vmware.com/products/vrealize-automation/saltstack-config.html
+.. _Salt Project Community Discord: https://discord.com/invite/J7b7EscrAs
+.. _Tanzu Salt: https://www.vmware.com/products/app-platform/tanzu-salt
 .. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/
 .. _Open an issue: https://github.com/saltstack/salt/issues/new/choose
 .. _SECURITY.md: https://github.com/saltstack/salt/blob/master/SECURITY.md
-.. _Calendar html: https://outlook.office365.com/owa/calendar/105f69bacd4541baa849529aed37eb2d@vmware.com/434ec2155b2b4cce90144c87f0dd03d56626754050155294962/calendar.html
-.. _Calendar ics: https://outlook.office365.com/owa/calendar/105f69bacd4541baa849529aed37eb2d@vmware.com/434ec2155b2b4cce90144c87f0dd03d56626754050155294962/calendar.ics
diff --git a/SUPPORT.rst b/SUPPORT.rst
index f98c4d3f2ee..54b81d8881b 100644
--- a/SUPPORT.rst
+++ b/SUPPORT.rst
@@ -1,17 +1,10 @@
-Get SaltStack Support and Help
-==============================
+Get Salt Project Support and Help
+=================================
 
-**IRC Chat** - Join the vibrant, helpful and positive SaltStack chat room in
-LiberaChat at #salt. There is no need to introduce yourself, or ask permission
-to join in, just help and be helped! Make sure to wait for an answer, sometimes
-it may take a few moments for someone to reply.
+**Salt Project Discord** - Join the Salt Project Community Discord!
+Use the following link to join the Discord server:
 
-`<https://web.libera.chat/#salt>`_
-
-**SaltStack Slack** - Alongside IRC is our SaltStack Community Slack for the
-SaltStack Working groups. Use the following link to request an invitation.
-
-`<https://via.vmw.com/salt-slack>`_
+`<https://discord.com/invite/J7b7EscrAs>`_
 
 **Mailing List** - The SaltStack community users mailing list is hosted by
 Google groups. Anyone can post to ask questions about SaltStack products and
@@ -20,13 +13,13 @@ anyone can help answer. Join the conversation!
 `<https://groups.google.com/forum/#!forum/salt-users>`_
 
 You may subscribe to the list without a Google account by emailing
-salt-users+subscribe@googlegroups.com and you may post to the list by emailing
-salt-users@googlegroups.com
+``salt-users+subscribe@googlegroups.com`` and you may post to the list by emailing
+``salt-users@googlegroups.com``
 
 **Reporting Issues** - To report an issue with Salt, please follow the
 guidelines for filing bug reports:
 `<https://docs.saltproject.io/en/master/topics/development/reporting_bugs.html>`_
 
-**SaltStack Support** - If you need dedicated, prioritized support, please
-consider a SaltStack Support package that fits your needs:
-`<http://www.saltstack.com/support>`_
+**Salt Project Support** - If you need dedicated, prioritized support, please
+consider taking a look at the Enterprise product:
+`Tanzu Salt <https://www.vmware.com/products/app-platform/tanzu-salt>`__
diff --git a/changelog/33669.added.md b/changelog/33669.added.md
new file mode 100644
index 00000000000..45fe6ead2ba
--- /dev/null
+++ b/changelog/33669.added.md
@@ -0,0 +1,3 @@
+Issue #33669: Fixes an issue with the ``ini_managed`` execution module
+where it would always wrap the separator with spaces. Adds a new parameter
+named ``no_spaces`` that will not wrap the separator with spaces.
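+
+A hypothetical usage sketch (``no_spaces`` is the parameter added by this
+change; ``ini.set_option`` is the existing execution module function):
+
+    salt '*' ini.set_option /etc/sample.ini "{'section': {'key': 'value'}}" no_spaces=True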
diff --git a/changelog/41794.fixed.md b/changelog/41794.fixed.md
new file mode 100644
index 00000000000..97247b84dc7
--- /dev/null
+++ b/changelog/41794.fixed.md
@@ -0,0 +1 @@
+Fixed `salt.*.get` shorthand via Salt-SSH
diff --git a/changelog/50196.fixed.md b/changelog/50196.fixed.md
deleted file mode 100644
index 979411a640d..00000000000
--- a/changelog/50196.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Made slsutil.renderer work with salt-ssh
diff --git a/changelog/51605.fixed.md b/changelog/51605.fixed.md
deleted file mode 100644
index 990b34413d9..00000000000
--- a/changelog/51605.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fixed defaults.merge is not available when using salt-ssh
diff --git a/changelog/56441.fixed.md b/changelog/56441.fixed.md
deleted file mode 100644
index 489ad80f770..00000000000
--- a/changelog/56441.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fixed config.get does not support merge option with salt-ssh
diff --git a/changelog/57649.fixed.md b/changelog/57649.fixed.md
deleted file mode 100644
index 12d22a0531c..00000000000
--- a/changelog/57649.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
- Update to include croniter in pkg requirements
diff --git a/changelog/58969.fixed.md b/changelog/58969.fixed.md
new file mode 100644
index 00000000000..366607bd97e
--- /dev/null
+++ b/changelog/58969.fixed.md
@@ -0,0 +1,4 @@
+Issue 58969: Fixes an issue with the `saltclass.expand_classes_in_order`
+function where it was losing nested class states during class
+expansion. The logic now uses `salt.utils.odict.OrderedDict` to keep
+the inclusion ordering.
diff --git a/changelog/61001.fixed.md b/changelog/61001.fixed.md
new file mode 100644
index 00000000000..f9e6acf934d
--- /dev/null
+++ b/changelog/61001.fixed.md
@@ -0,0 +1,2 @@
+Fixed an issue uninstalling packages on Windows using pkg.removed where there
+are multiple versions of the same software installed
diff --git a/changelog/61100.fixed.md b/changelog/61100.fixed.md
deleted file mode 100644
index d7ac2b6bc3f..00000000000
--- a/changelog/61100.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fixed state.test does not work with salt-ssh
diff --git a/changelog/61143.fixed.md b/changelog/61143.fixed.md
deleted file mode 100644
index 08a62c9d8b1..00000000000
--- a/changelog/61143.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Made slsutil.findup work with salt-ssh
diff --git a/changelog/62501.fixed.md b/changelog/62501.fixed.md
new file mode 100644
index 00000000000..5b9b0460322
--- /dev/null
+++ b/changelog/62501.fixed.md
@@ -0,0 +1 @@
+Convert stdin string to bytes regardless of stdin_raw_newlines
diff --git a/changelog/63933.fixed.md b/changelog/63933.fixed.md
new file mode 100644
index 00000000000..794c170bd04
--- /dev/null
+++ b/changelog/63933.fixed.md
@@ -0,0 +1,4 @@
+Issue 63933: Fixes an issue with the `saltclass.expanded_dict_from_minion`
+function where it was passing a reference to the minion `dict`, which was
+overridden by nested classes during class expansion. The node definition is
+now copied with `copy.deepcopy` instead of passed by reference.
diff --git a/changelog/64300.fixed.md b/changelog/64300.fixed.md
deleted file mode 100644
index 4418db1d04c..00000000000
--- a/changelog/64300.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix utf8 handling in 'pass' renderer
diff --git a/changelog/64563.fixed.md b/changelog/64563.fixed.md
deleted file mode 100644
index fadd9721fed..00000000000
--- a/changelog/64563.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fixed incorrect version argument will be ignored for multiple package targets warning when using pkgs argument to yumpkg module.
diff --git a/changelog/64630.fixed.md b/changelog/64630.fixed.md
new file mode 100644
index 00000000000..f49c58d4c2e
--- /dev/null
+++ b/changelog/64630.fixed.md
@@ -0,0 +1,3 @@
+Fixed an intermittent issue with file.recurse where the state would
+report failure even on success. Makes sure symlinks are created
+after the target file is created
diff --git a/changelog/64728.fixed.md b/changelog/64728.fixed.md
deleted file mode 100644
index afe36f42316..00000000000
--- a/changelog/64728.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-salt-cloud honors root_dir config setting for log_file location and fixes for root_dir locations on windows.
diff --git a/changelog/65067.fixed.md b/changelog/65067.fixed.md
deleted file mode 100644
index d6de87b5bc1..00000000000
--- a/changelog/65067.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fixed slsutil.update with salt-ssh during template rendering
diff --git a/changelog/65104.fixed.md b/changelog/65104.fixed.md
new file mode 100644
index 00000000000..020b990b630
--- /dev/null
+++ b/changelog/65104.fixed.md
@@ -0,0 +1 @@
+The 'profile' outputter no longer crashes with incorrectly formatted data
diff --git a/changelog/65251.fixed.md b/changelog/65251.fixed.md
deleted file mode 100644
index e8abd5af327..00000000000
--- a/changelog/65251.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix config.items when called on minion
diff --git a/changelog/65265.fixed.md b/changelog/65265.fixed.md
new file mode 100644
index 00000000000..87e9846928a
--- /dev/null
+++ b/changelog/65265.fixed.md
@@ -0,0 +1,2 @@
+Await on zmq monitor socket's poll method to fix publish server reliability in
+environments with a large number of minions.
diff --git a/changelog/65630.fixed.md b/changelog/65630.fixed.md
deleted file mode 100644
index e8650abcdc1..00000000000
--- a/changelog/65630.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Added SSH wrapper for logmod
diff --git a/changelog/65816.fixed.md b/changelog/65816.fixed.md
deleted file mode 100644
index 23aaa1e5e8e..00000000000
--- a/changelog/65816.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM
diff --git a/changelog/66213.fixed.md b/changelog/66213.fixed.md
new file mode 100644
index 00000000000..96f3a3139e8
--- /dev/null
+++ b/changelog/66213.fixed.md
@@ -0,0 +1 @@
+Fix the vault module not respecting the `server.verify` option during unwrap when verify is set to `False` or to a CA file on disk
diff --git a/changelog/66228.fixed.md b/changelog/66228.fixed.md
new file mode 100644
index 00000000000..620b2c1e8b0
--- /dev/null
+++ b/changelog/66228.fixed.md
@@ -0,0 +1 @@
+Make sure the master_event_pub.ipc file has correct read/write permissions for the salt group.
diff --git a/changelog/66249.fixed.md b/changelog/66249.fixed.md
new file mode 100644
index 00000000000..dac7b563a49
--- /dev/null
+++ b/changelog/66249.fixed.md
@@ -0,0 +1 @@
+Fix batch mode hanging indefinitely in some scenarios
diff --git a/changelog/66252.fixed.md b/changelog/66252.fixed.md
new file mode 100644
index 00000000000..2227c899844
--- /dev/null
+++ b/changelog/66252.fixed.md
@@ -0,0 +1 @@
+Applying `selinux.fcontext_policy_present` to a shorter path than an existing entry now works
diff --git a/changelog/66347.fixed.md b/changelog/66347.fixed.md
deleted file mode 100644
index e61e5ce64a9..00000000000
--- a/changelog/66347.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix win_task ExecutionTimeLimit and result/error code interpretation
diff --git a/changelog/66376.fixed.md b/changelog/66376.fixed.md
new file mode 100644
index 00000000000..5df1feaa252
--- /dev/null
+++ b/changelog/66376.fixed.md
@@ -0,0 +1 @@
+Fixed `salt.*.*` attribute syntax for non-Jinja renderers via Salt-SSH
diff --git a/changelog/66414.fixed.md b/changelog/66414.fixed.md
deleted file mode 100644
index e777d18226d..00000000000
--- a/changelog/66414.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key
diff --git a/changelog/66441.fixed.md b/changelog/66441.fixed.md
deleted file mode 100644
index e61e5ce64a9..00000000000
--- a/changelog/66441.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix win_task ExecutionTimeLimit and result/error code interpretation
diff --git a/changelog/66467.removed.md b/changelog/66467.removed.md
new file mode 100644
index 00000000000..aca1198858e
--- /dev/null
+++ b/changelog/66467.removed.md
@@ -0,0 +1 @@
+Remove the psutil_compat.py file, which should have been removed when RHEL 6 reached EOL
diff --git a/changelog/66560.fixed.md b/changelog/66560.fixed.md
new file mode 100644
index 00000000000..e71230f25b4
--- /dev/null
+++ b/changelog/66560.fixed.md
@@ -0,0 +1 @@
+Correct bash-completion for Debian / Ubuntu
diff --git a/changelog/66579.fixed.md b/changelog/66579.fixed.md
deleted file mode 100644
index ccef663b846..00000000000
--- a/changelog/66579.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix support for FIPS approved encryption and signing algorithms.
diff --git a/changelog/66596.fixed.md b/changelog/66596.fixed.md
new file mode 100644
index 00000000000..a4a27151f2c
--- /dev/null
+++ b/changelog/66596.fixed.md
@@ -0,0 +1,2 @@
+Fixed an issue with cmd.run with requirements when the shell is not the
+default
diff --git a/changelog/66600.fixed.md b/changelog/66600.fixed.md
new file mode 100644
index 00000000000..c3db4e1263e
--- /dev/null
+++ b/changelog/66600.fixed.md
@@ -0,0 +1 @@
+Fixed accessing wrapper modules in Salt-SSH Jinja templates via attribute syntax
diff --git a/changelog/66604.fixed.md b/changelog/66604.fixed.md
deleted file mode 100644
index 4d1a771ca54..00000000000
--- a/changelog/66604.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Fix RPM package provides
diff --git a/changelog/66610.fixed.md b/changelog/66610.fixed.md
new file mode 100644
index 00000000000..d04973a68a8
--- /dev/null
+++ b/changelog/66610.fixed.md
@@ -0,0 +1 @@
+Fixed Salt-SSH crash when key deploy is skipped manually
diff --git a/changelog/66623.deprecated.md b/changelog/66623.deprecated.md
deleted file mode 100644
index 8d829eadec9..00000000000
--- a/changelog/66623.deprecated.md
+++ /dev/null
@@ -1 +0,0 @@
-Drop CentOS 7 support
diff --git a/changelog/66624.added.md b/changelog/66624.added.md
deleted file mode 100644
index fbc4adf84c7..00000000000
--- a/changelog/66624.added.md
+++ /dev/null
@@ -1 +0,0 @@
-Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9)
diff --git a/changelog/66624.deprecated.md b/changelog/66624.deprecated.md
deleted file mode 100644
index 10b397bae85..00000000000
--- a/changelog/66624.deprecated.md
+++ /dev/null
@@ -1 +0,0 @@
-No longer build RPM packages with CentOS Stream 9
diff --git a/changelog/66632.fixed.md b/changelog/66632.fixed.md
deleted file mode 100644
index c50213867ca..00000000000
--- a/changelog/66632.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Upgrade relAenv to 0.16.1. This release fixes several package installs for salt-pip
diff --git a/changelog/66663.fixed.md b/changelog/66663.fixed.md
deleted file mode 100644
index 14a40b4730e..00000000000
--- a/changelog/66663.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Upgrade relenv to 0.17.0 (https://github.com/saltstack/relenv/blob/v0.17.0/CHANGELOG.md)
diff --git a/changelog/66666.fixed.md b/changelog/66666.fixed.md
deleted file mode 100644
index 076088f4d0c..00000000000
--- a/changelog/66666.fixed.md
+++ /dev/null
@@ -1,4 +0,0 @@
-Upgrade dependencies due to security issues:
-- pymysql>=1.1.1
-- requests>=2.32.0
-- docker>=7.1.0
diff --git a/changelog/66683.fixed.md b/changelog/66683.fixed.md
deleted file mode 100644
index 2917188fa63..00000000000
--- a/changelog/66683.fixed.md
+++ /dev/null
@@ -1 +0,0 @@
-Corrected missed line in branch 3006.x when backporting from PR 61620 and 65044
diff --git a/changelog/66716.fixed.md b/changelog/66716.fixed.md
new file mode 100644
index 00000000000..f3ad42f8edf
--- /dev/null
+++ b/changelog/66716.fixed.md
@@ -0,0 +1,2 @@
+Fixed an issue where ``status.master`` wasn't detecting a connection to the
+specified master properly
diff --git a/changelog/66718.fixed.md b/changelog/66718.fixed.md
new file mode 100644
index 00000000000..8a4a15ebad4
--- /dev/null
+++ b/changelog/66718.fixed.md
@@ -0,0 +1,2 @@
+Fixed ``win_wua.available`` when some of the update objects are empty CDispatch
+objects. The ``available`` function no longer crashes
diff --git a/changelog/66726.fixed.md b/changelog/66726.fixed.md
new file mode 100644
index 00000000000..b9682900d1d
--- /dev/null
+++ b/changelog/66726.fixed.md
@@ -0,0 +1 @@
+Clean up multiprocessing file handles on minion
diff --git a/changelog/66772.fixed.md b/changelog/66772.fixed.md
new file mode 100644
index 00000000000..2f9f40ee523
--- /dev/null
+++ b/changelog/66772.fixed.md
@@ -0,0 +1 @@
+Fixed nacl.keygen when the sk_file or pk_file does not yet exist
diff --git a/changelog/66783.fixed.md b/changelog/66783.fixed.md
new file mode 100644
index 00000000000..2bd08e3411d
--- /dev/null
+++ b/changelog/66783.fixed.md
@@ -0,0 +1 @@
+Fix YAML output
diff --git a/changelog/66784.fixed.md b/changelog/66784.fixed.md
new file mode 100644
index 00000000000..afe9df887a5
--- /dev/null
+++ b/changelog/66784.fixed.md
@@ -0,0 +1,2 @@
+Fixed an issue where enabling `grain_opts` in the minion config would cause
+some core grains to be overwritten.
diff --git a/changelog/66786.fixed.md b/changelog/66786.fixed.md
new file mode 100644
index 00000000000..22bb47e0806
--- /dev/null
+++ b/changelog/66786.fixed.md
@@ -0,0 +1,2 @@
+Fix an issue where files created using `salt.utils.atomicfile.atomic_open()`
+were created with restrictive permissions instead of respecting the umask.
diff --git a/changelog/66789.fixed.md b/changelog/66789.fixed.md
new file mode 100644
index 00000000000..f6d18c6247d
--- /dev/null
+++ b/changelog/66789.fixed.md
@@ -0,0 +1 @@
+Fix bad async_method name on AsyncPubClient class
diff --git a/changelog/66796.fixed.md b/changelog/66796.fixed.md
new file mode 100644
index 00000000000..cff6c771fa9
--- /dev/null
+++ b/changelog/66796.fixed.md
@@ -0,0 +1 @@
+Ensure Manjaro ARM reports the correct os_family of Arch.
diff --git a/changelog/66835.fixed.md b/changelog/66835.fixed.md
new file mode 100644
index 00000000000..33d932b7fdf
--- /dev/null
+++ b/changelog/66835.fixed.md
@@ -0,0 +1,2 @@
+Removed ``salt.utils.data.decode`` usage from the fileserver. This function was
+necessary to support Python 2. This speeds up loading the list cache by 80-90x.
diff --git a/changelog/66837.fixed.md b/changelog/66837.fixed.md
new file mode 100644
index 00000000000..ccbe4a1155f
--- /dev/null
+++ b/changelog/66837.fixed.md
@@ -0,0 +1,3 @@
+Issue 66837: Fixes an issue with the `network.local_port_tcp` function
+where it was not parsing IPv4-mapped IPv6 addresses correctly. The
+``::ffff:`` prefix is now removed and only the IP address is returned.
diff --git a/changelog/66856.fixed.md b/changelog/66856.fixed.md
new file mode 100644
index 00000000000..22800e3b4fe
--- /dev/null
+++ b/changelog/66856.fixed.md
@@ -0,0 +1 @@
+Better handling of `systemctl --version` output in salt.grains.core._systemd
diff --git a/changelog/66858.fixed.md b/changelog/66858.fixed.md
new file mode 100644
index 00000000000..6cb01c0476a
--- /dev/null
+++ b/changelog/66858.fixed.md
@@ -0,0 +1,2 @@
+Upgrade relenv to 0.17.3. This release includes python 3.10.15, openssl 3.2.3,
+and fixes for pip 24.2.
diff --git a/changelog/66883.fixed.md b/changelog/66883.fixed.md
new file mode 100644
index 00000000000..999b049857b
--- /dev/null
+++ b/changelog/66883.fixed.md
@@ -0,0 +1 @@
+Added cryptography back to base.txt requirements as a dependency
diff --git a/changelog/66886.deprecated.md b/changelog/66886.deprecated.md
new file mode 100644
index 00000000000..597c0aee10a
--- /dev/null
+++ b/changelog/66886.deprecated.md
@@ -0,0 +1 @@
+Drop Arch Linux support
diff --git a/changelog/66888.fixed.md b/changelog/66888.fixed.md
new file mode 100644
index 00000000000..5284693b05d
--- /dev/null
+++ b/changelog/66888.fixed.md
@@ -0,0 +1 @@
+Update master cluster tutorial haproxy config with proper timeouts for publish port
diff --git a/changelog/66891.fixed.md b/changelog/66891.fixed.md
new file mode 100644
index 00000000000..7bd756d7f29
--- /dev/null
+++ b/changelog/66891.fixed.md
@@ -0,0 +1 @@
+Remove "perms" from `linux_acl.list_absent()` documentation
diff --git a/changelog/66931.fixed.md b/changelog/66931.fixed.md
new file mode 100644
index 00000000000..264c89e4427
--- /dev/null
+++ b/changelog/66931.fixed.md
@@ -0,0 +1,4 @@
+transports.tcp: ensure the pull path is being used before attempting chmod.
+The fix prevents an unnecessary traceback when the TCP transport is
+not using unix sockets. No functionality has changed, as the traceback
+occurs when an async task was about to exit anyway.
diff --git a/changelog/66955.fixed.md b/changelog/66955.fixed.md
new file mode 100644
index 00000000000..d45b8f650a7
--- /dev/null
+++ b/changelog/66955.fixed.md
@@ -0,0 +1 @@
+Allow for the secure-boot efivars directory to contain SecureBoot-xxx files rather than directories with a data file
diff --git a/changelog/66959.fixed.md b/changelog/66959.fixed.md
new file mode 100644
index 00000000000..69e40d66479
--- /dev/null
+++ b/changelog/66959.fixed.md
@@ -0,0 +1,2 @@
+Removed the usage of wmic to get the disk and iscsi grains for Windows. The wmic
+binary is being deprecated.
diff --git a/changelog/66992.fixed.md b/changelog/66992.fixed.md
new file mode 100644
index 00000000000..434fb1bb6a2
--- /dev/null
+++ b/changelog/66992.fixed.md
@@ -0,0 +1,2 @@
+Fixes an issue with the LGPO module when trying to parse ADMX/ADML files
+that have a space in the XMLNS url in the policyDefinitionsResources header.
diff --git a/changelog/66993.fixed.md b/changelog/66993.fixed.md
new file mode 100644
index 00000000000..775a4c4bf6c
--- /dev/null
+++ b/changelog/66993.fixed.md
@@ -0,0 +1 @@
+Salt master waits for publish servers while starting up.
diff --git a/changelog/66996.fixed.md b/changelog/66996.fixed.md
new file mode 100644
index 00000000000..eff5079f53e
--- /dev/null
+++ b/changelog/66996.fixed.md
@@ -0,0 +1 @@
+Ensured global dunders like __env__ are defined in state modules that are run in parallel on spawning platforms
diff --git a/changelog/66999.fixed.md b/changelog/66999.fixed.md
new file mode 100644
index 00000000000..83c219f98de
--- /dev/null
+++ b/changelog/66999.fixed.md
@@ -0,0 +1 @@
+Filtered unpicklable objects from the context dict when invoking states in parallel on spawning platforms to avoid a crash
diff --git a/changelog/67017.fixed.md b/changelog/67017.fixed.md
new file mode 100644
index 00000000000..69de78c0abc
--- /dev/null
+++ b/changelog/67017.fixed.md
@@ -0,0 +1 @@
+Update for deprecation of hex in pygit2 1.15.0 and above
diff --git a/changelog/67019.fixed.md b/changelog/67019.fixed.md
new file mode 100644
index 00000000000..d686ed7d849
--- /dev/null
+++ b/changelog/67019.fixed.md
@@ -0,0 +1 @@
+Fixed blob path for salt.ufw in the firewall tutorial documentation
diff --git a/changelog/67020.fixed.md b/changelog/67020.fixed.md
new file mode 100644
index 00000000000..dcddb965aea
--- /dev/null
+++ b/changelog/67020.fixed.md
@@ -0,0 +1 @@
+Update locations for bootstrap scripts to the new infrastructure (GitHub releases for bootstrap)
diff --git a/changelog/67058.fixed.md b/changelog/67058.fixed.md
new file mode 100644
index 00000000000..248e4de082a
--- /dev/null
+++ b/changelog/67058.fixed.md
@@ -0,0 +1 @@
+Recognise newer AMD GPU devices
diff --git a/changelog/67122.fixed.md b/changelog/67122.fixed.md
new file mode 100644
index 00000000000..65649df74bb
--- /dev/null
+++ b/changelog/67122.fixed.md
@@ -0,0 +1,2 @@
+Fixed an issue with making changes to the Windows Firewall when the
+AllowInboundRules setting is set to True
diff --git a/cicd/golden-images.json b/cicd/golden-images.json
index ca7818fdd6b..b0504ad777a 100644
--- a/cicd/golden-images.json
+++ b/cicd/golden-images.json
@@ -1,8 +1,8 @@
 {
   "amazonlinux-2-arm64": {
-    "ami": "ami-0c98c023fba59d522",
+    "ami": "ami-0aab00f54b6cddde6",
     "ami_description": "CI Image of AmazonLinux 2 arm64",
-    "ami_name": "salt-project/ci/amazonlinux/2/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/amazonlinux/2/arm64/20240912.2135",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -10,9 +10,9 @@
     "ssh_username": "ec2-user"
   },
   "amazonlinux-2": {
-    "ami": "ami-02cba95cfd7074794",
+    "ami": "ami-0fd6cec7bbcf52d36",
     "ami_description": "CI Image of AmazonLinux 2 x86_64",
-    "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240912.2135",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -20,9 +20,9 @@
     "ssh_username": "ec2-user"
   },
   "amazonlinux-2023-arm64": {
-    "ami": "ami-0609f0e98f5a6b73d",
+    "ami": "ami-095e9e4757b5fca1a",
     "ami_description": "CI Image of AmazonLinux 2023 arm64",
-    "ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240509.1529",
+    "ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -30,29 +30,19 @@
     "ssh_username": "ec2-user"
   },
   "amazonlinux-2023": {
-    "ami": "ami-0554a801eb6dcc42c",
+    "ami": "ami-002d043f1a36bf06e",
     "ami_description": "CI Image of AmazonLinux 2023 x86_64",
-    "ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240509.1529",
+    "ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
     "is_windows": "false",
     "ssh_username": "ec2-user"
   },
-  "archlinux-lts": {
-    "ami": "ami-01ad78f19930b9747",
-    "ami_description": "CI Image of ArchLinux lts x86_64",
-    "ami_name": "salt-project/ci/archlinux/lts/x86_64/20240509.1530",
-    "arch": "x86_64",
-    "cloudwatch-agent-available": "false",
-    "instance_type": "t3a.large",
-    "is_windows": "false",
-    "ssh_username": "arch"
-  },
   "debian-11-arm64": {
-    "ami": "ami-0eff227d9a94d8692",
+    "ami": "ami-0ff63235fce7bea1d",
     "ami_description": "CI Image of Debian 11 arm64",
-    "ami_name": "salt-project/ci/debian/11/arm64/20240509.1529",
+    "ami_name": "salt-project/ci/debian/11/arm64/20240912.2135",
     "arch": "arm64",
     "cloudwatch-agent-available": "false",
     "instance_type": "m6g.large",
@@ -60,9 +50,9 @@
     "ssh_username": "admin"
   },
   "debian-11": {
-    "ami": "ami-099b2a5a1fb995166",
+    "ami": "ami-08685bfca48beeb67",
     "ami_description": "CI Image of Debian 11 x86_64",
-    "ami_name": "salt-project/ci/debian/11/x86_64/20240509.1529",
+    "ami_name": "salt-project/ci/debian/11/x86_64/20240912.2135",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -70,9 +60,9 @@
     "ssh_username": "admin"
   },
   "debian-12-arm64": {
-    "ami": "ami-0ab6b0cc8488f8880",
+    "ami": "ami-07d383138f04b32ba",
     "ami_description": "CI Image of Debian 12 arm64",
-    "ami_name": "salt-project/ci/debian/12/arm64/20240509.1529",
+    "ami_name": "salt-project/ci/debian/12/arm64/20240912.2135",
     "arch": "arm64",
     "cloudwatch-agent-available": "false",
     "instance_type": "m6g.large",
@@ -80,9 +70,9 @@
     "ssh_username": "admin"
   },
   "debian-12": {
-    "ami": "ami-0e1f5b55325249c4e",
+    "ami": "ami-0867ec74072fd97a0",
     "ami_description": "CI Image of Debian 12 x86_64",
-    "ami_name": "salt-project/ci/debian/12/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/debian/12/x86_64/20240912.2135",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -90,9 +80,9 @@
     "ssh_username": "admin"
   },
   "fedora-40-arm64": {
-    "ami": "ami-064df327a55f83953",
+    "ami": "ami-03be8e03c17f1abeb",
     "ami_description": "CI Image of Fedora 40 arm64",
-    "ami_name": "salt-project/ci/fedora/40/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/fedora/40/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -100,9 +90,9 @@
     "ssh_username": "fedora"
   },
   "fedora-40": {
-    "ami": "ami-08d8dbd4f063788de",
+    "ami": "ami-060a59b30809758b2",
     "ami_description": "CI Image of Fedora 40 x86_64",
-    "ami_name": "salt-project/ci/fedora/40/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/fedora/40/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -110,9 +100,9 @@
     "ssh_username": "fedora"
   },
   "opensuse-15": {
-    "ami": "ami-0f82d5ab3015af6ad",
+    "ami": "ami-0aaf63315ada5365b",
     "ami_description": "CI Image of Opensuse 15 x86_64",
-    "ami_name": "salt-project/ci/opensuse/15/x86_64/20240509.1529",
+    "ami_name": "salt-project/ci/opensuse/15/x86_64/20240912.2135",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -120,9 +110,9 @@
     "ssh_username": "ec2-user"
   },
   "photonos-4-arm64": {
-    "ami": "ami-0ea152c346cb8e13b",
+    "ami": "ami-0d425acec9d0d78a5",
     "ami_description": "CI Image of PhotonOS 4 arm64",
-    "ami_name": "salt-project/ci/photonos/4/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/photonos/4/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -130,9 +120,9 @@
     "ssh_username": "root"
   },
   "photonos-4": {
-    "ami": "ami-09b55d0bf3a1aa7e5",
+    "ami": "ami-056d988807f8b586d",
     "ami_description": "CI Image of PhotonOS 4 x86_64",
-    "ami_name": "salt-project/ci/photonos/4/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/photonos/4/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -140,9 +130,9 @@
     "ssh_username": "root"
   },
   "photonos-5-arm64": {
-    "ami": "ami-09de4952bc9fc068a",
+    "ami": "ami-059f47b459d04544a",
     "ami_description": "CI Image of PhotonOS 5 arm64",
-    "ami_name": "salt-project/ci/photonos/5/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/photonos/5/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -150,9 +140,9 @@
     "ssh_username": "root"
   },
   "photonos-5": {
-    "ami": "ami-0c3375a583643fc77",
+    "ami": "ami-06424daf7c85ffff0",
     "ami_description": "CI Image of PhotonOS 5 x86_64",
-    "ami_name": "salt-project/ci/photonos/5/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/photonos/5/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -160,9 +150,9 @@
     "ssh_username": "root"
   },
   "rockylinux-8-arm64": {
-    "ami": "ami-0662cc201cada14b8",
+    "ami": "ami-0a21b175629f1a793",
     "ami_description": "CI Image of RockyLinux 8 arm64",
-    "ami_name": "salt-project/ci/rockylinux/8/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/rockylinux/8/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -170,9 +160,9 @@
     "ssh_username": "rocky"
   },
   "rockylinux-8": {
-    "ami": "ami-071ca70a907d79e05",
+    "ami": "ami-01032695e18f0fe85",
     "ami_description": "CI Image of RockyLinux 8 x86_64",
-    "ami_name": "salt-project/ci/rockylinux/8/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/rockylinux/8/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -180,9 +170,9 @@
     "ssh_username": "rocky"
   },
   "rockylinux-9-arm64": {
-    "ami": "ami-065842dfdf03a1a03",
+    "ami": "ami-0c9147ca5f07effc6",
     "ami_description": "CI Image of RockyLinux 9 arm64",
-    "ami_name": "salt-project/ci/rockylinux/9/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/rockylinux/9/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -190,9 +180,9 @@
     "ssh_username": "rocky"
   },
   "rockylinux-9": {
-    "ami": "ami-09f5d6df00e99ba16",
+    "ami": "ami-01a72f34d198efc4a",
     "ami_description": "CI Image of RockyLinux 9 x86_64",
-    "ami_name": "salt-project/ci/rockylinux/9/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/rockylinux/9/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -200,9 +190,9 @@
     "ssh_username": "rocky"
   },
   "ubuntu-20.04-arm64": {
-    "ami": "ami-00171fa604b826054",
+    "ami": "ami-0bf8ea4c07a88d6c5",
     "ami_description": "CI Image of Ubuntu 20.04 arm64",
-    "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -210,9 +200,9 @@
     "ssh_username": "ubuntu"
   },
   "ubuntu-20.04": {
-    "ami": "ami-07ddfbdc489064022",
+    "ami": "ami-08a84f7455622c3d5",
     "ami_description": "CI Image of Ubuntu 20.04 x86_64",
-    "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -220,9 +210,9 @@
     "ssh_username": "ubuntu"
   },
   "ubuntu-22.04-arm64": {
-    "ami": "ami-0e6b6fc1dd298e055",
+    "ami": "ami-0415a2d2279277d61",
     "ami_description": "CI Image of Ubuntu 22.04 arm64",
-    "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -230,9 +220,9 @@
     "ssh_username": "ubuntu"
   },
   "ubuntu-22.04": {
-    "ami": "ami-0736289579c0d01ba",
+    "ami": "ami-055513129ce06397c",
     "ami_description": "CI Image of Ubuntu 22.04 x86_64",
-    "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -240,9 +230,9 @@
     "ssh_username": "ubuntu"
   },
   "ubuntu-24.04-arm64": {
-    "ami": "ami-015058823f69446b3",
+    "ami": "ami-035ef6d54ec25b0fa",
     "ami_description": "CI Image of Ubuntu 24.04 arm64",
-    "ami_name": "salt-project/ci/ubuntu/24.04/arm64/20240509.1530",
+    "ami_name": "salt-project/ci/ubuntu/24.04/arm64/20240912.2136",
     "arch": "arm64",
     "cloudwatch-agent-available": "true",
     "instance_type": "m6g.large",
@@ -250,9 +240,9 @@
     "ssh_username": "ubuntu"
   },
   "ubuntu-24.04": {
-    "ami": "ami-0eb04152e7cafaaf9",
+    "ami": "ami-0a287b781a487ec65",
     "ami_description": "CI Image of Ubuntu 24.04 x86_64",
-    "ami_name": "salt-project/ci/ubuntu/24.04/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/ubuntu/24.04/x86_64/20240912.2136",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.large",
@@ -260,9 +250,9 @@
     "ssh_username": "ubuntu"
   },
   "windows-2016": {
-    "ami": "ami-06026cb4d83072df5",
+    "ami": "ami-030cdb60764141f56",
     "ami_description": "CI Image of Windows 2016 x86_64",
-    "ami_name": "salt-project/ci/windows/2016/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/windows/2016/x86_64/20240913.1756",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.xlarge",
@@ -270,9 +260,9 @@
     "ssh_username": "Administrator"
   },
   "windows-2019": {
-    "ami": "ami-095a9256ec0e8261c",
+    "ami": "ami-08f10b0d4914572de",
     "ami_description": "CI Image of Windows 2019 x86_64",
-    "ami_name": "salt-project/ci/windows/2019/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/windows/2019/x86_64/20240913.1756",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.xlarge",
@@ -280,9 +270,9 @@
     "ssh_username": "Administrator"
   },
   "windows-2022": {
-    "ami": "ami-0d295c0711e513c05",
+    "ami": "ami-07eda52ffbd76a4c6",
     "ami_description": "CI Image of Windows 2022 x86_64",
-    "ami_name": "salt-project/ci/windows/2022/x86_64/20240509.1530",
+    "ami_name": "salt-project/ci/windows/2022/x86_64/20240913.1756",
     "arch": "x86_64",
     "cloudwatch-agent-available": "true",
     "instance_type": "t3a.xlarge",
diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml
index 68a5dd25541..d9a08d10f46 100644
--- a/cicd/shared-gh-workflows-context.yml
+++ b/cicd/shared-gh-workflows-context.yml
@@ -1,14 +1,17 @@
 nox_version: "2022.8.7"
-python_version: "3.10.14"
-relenv_version: "0.17.0"
+python_version: "3.10.15"
+relenv_version: "0.18.0"
 release_branches:
   - "3006.x"
   - "3007.x"
-mandatory_os_slugs:
+pr-testrun-slugs:
+  - ubuntu-24.04-pkg
+  - ubuntu-24.04
   - rockylinux-9
-  - amazonlinux-2023-arm64
-  - archlinux-lts
-  - photonos-5-arm64
-  - macos-12
-  - ubuntu-24.04-arm64
+  - rockylinux-9-pkg
   - windows-2022
+  - windows-2022-pkg
+  - macos-15
+  - macos-15-pkg
+full-testrun-slugs:
+  - all
diff --git a/doc/Makefile b/doc/Makefile
index 9b1b1939a9b..60d95e24e81 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -16,7 +16,7 @@ ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 # the i18n builder cannot share the environment and doctrees with the others
 I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 
-.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp latex latexpdf text man changes linkcheck doctest
 
 help:
 	@echo "Please use \`make <target>' where <target> is one of"
@@ -28,7 +28,6 @@ help:
 	@echo "  htmlhelp   to make HTML files and a HTML help project"
 	@echo "  qthelp     to make HTML files and a qthelp project"
 	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
 	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
 	@echo "  pdf        to make Salt-all.pdf and splitted pdf using xelatex"
 	@echo "  cheatsheet to create salt-cheatsheet.pdf"
@@ -101,11 +100,6 @@ devhelp: check_sphinx-build
 	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt"
 	@echo "# devhelp"
 
-epub: check_sphinx-build
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
 latex: check_sphinx-build
 	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
 	@echo
diff --git a/doc/_incl/requisite_incl.rst b/doc/_incl/requisite_incl.rst
index b478527d6b1..f5723a952ea 100644
--- a/doc/_incl/requisite_incl.rst
+++ b/doc/_incl/requisite_incl.rst
@@ -7,4 +7,4 @@ following the instructions in the
     The Salt Project community can help offer advice and help troubleshoot
     technical issues as you're learning about Salt. One of the best places to
     talk to the community is on the
-    `Salt Project Slack workspace <https://saltstackcommunity.slack.com/>`_.
+    `Salt Project Discord Community <https://discord.com/invite/J7b7EscrAs>`_.
diff --git a/doc/_themes/saltstack/layout.html b/doc/_themes/saltstack/layout.html
index 8b2426c4d66..59393aef8d0 100644
--- a/doc/_themes/saltstack/layout.html
+++ b/doc/_themes/saltstack/layout.html
@@ -248,8 +248,8 @@
                                 </div>
                                 <div class="footerCol">
                                         <h4>Community</h4>
-                                        <a href="http://saltstack.org">saltstack.org</a>
-                                        <a href="http://docs.saltstack.org/en/latest/">Documentation</a>
+                                        <a href="http://saltproject.io">saltproject.io</a>
+                                        <a href="http://docs.saltproject.io/en/latest/">Documentation</a>
 <!--                                    <a href="#">Blogs</a> -->
                                 </div>
                         </div>
diff --git a/doc/_themes/saltstack2/layout.html b/doc/_themes/saltstack2/layout.html
index 32fff3e6095..ef99aa2ebbd 100644
--- a/doc/_themes/saltstack2/layout.html
+++ b/doc/_themes/saltstack2/layout.html
@@ -163,16 +163,11 @@
                     <!-- Collect the nav links, forms, and other content for toggling -->
                     <div class="collapse navbar-collapse" id="navbarCollapse">
                         <ul class="nav navbar-nav">
-                <li><a href="/en/latest/">Overview</a></li>
-                <li><a href="https://docs.saltproject.io/salt/user-guide/en/latest/">Salt User Guide</a></li>
-                <li><a href="/en/latest/contents.html">Documentation</a></li>
-                <li><a href="https://repo.saltproject.io">Downloads</a></li>
-                <li><a href="/en/latest/topics/development/">Develop</a></li>
-                            <!--<li><a href="/en/2016.3/faq/">FAQ</a></li>
-                            <li><a href="/en/2016.3/samples/">Code Samples</a></li>-->
-                            <!--                <li><a href="https://repo.saltproject.io" target="_blank">Downloads</a></li>-->
-                            <!--<li><a href="http://saltstack.com/training" target="_blank">Training</a></li>
-                            <li><a href="http://saltstack.com/support" target="_blank">Support</a></li>-->
+                            <li><a href="/en/latest/">Overview</a></li>
+                            <li><a href="https://docs.saltproject.io/salt/user-guide/en/latest/">Salt User Guide</a></li>
+                            <li><a href="/en/latest/contents.html">Documentation</a></li>
+                            <li><a href="https://packages.broadcom.com/artifactory/saltproject-generic/">Downloads</a></li>
+                            <li><a href="/en/latest/topics/development/">Develop</a></li>
                         </ul>
                     </div>
                 </div>
@@ -193,10 +188,8 @@
 
                                     {% if not (build_type == repo_primary_branch or build_type == "next") and on_saltstack %}
                                     <li><a class="icon-dl" href="/en/pdf/Salt-{{ release }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
-                                    <li><a class="icon-dl" href="/en/epub/Salt-{{ release }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
                                     {% elif build_type == repo_primary_branch and on_saltstack %}
                                     <li><a class="icon-dl" href="/en/pdf/Salt-{{ repo_primary_branch }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
-                                    <li><a class="icon-dl" href="/en/epub/Salt-{{ repo_primary_branch }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
                                     {% endif %}
                                 </ul>
                             </nav>
@@ -297,7 +290,7 @@
                 {% if on_saltstack %}
                 {#
                 {% if [True, False]|random %}
-                <a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
+                <a href="http://saltproject.io" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
                 {% else %}
                 <a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
                 {% endif %} #}-->
diff --git a/doc/_themes/saltstack2/static/images/epub_icon.svg b/doc/_themes/saltstack2/static/images/epub_icon.svg
deleted file mode 100644
index e50861b8d3c..00000000000
--- a/doc/_themes/saltstack2/static/images/epub_icon.svg
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 15.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<svg version="1.1" id="Ebene_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
-	 width="600px" height="800px" viewBox="0 0 600 800" enable-background="new 0 0 600 800" xml:space="preserve">
-<g>
-	<path fill="#86B918" d="M131.969,755.898c11.244,0,19.199-4.599,25.85-11.573l7.591,6.827c-8.227,9.192-18.222,15.376-33.76,15.376
-		c-22.534,0-40.906-17.294-40.906-42.824c0-23.777,16.649-42.815,39.333-42.815c24.237,0,38.203,19.347,38.203,43.453
-		c0,1.113,0,2.381-0.15,3.803h-65.031C104.838,745.921,117.536,755.898,131.969,755.898z M155.927,719.111
-		c-1.277-14.907-9.844-27.916-26.194-27.916c-14.242,0-25.043,11.909-26.639,27.916H155.927z"/>
-	<path fill="#41281B" d="M231.23,725.461h-26.957v39.177H191.75V653.616h41.562c25.043,0,41.542,13.331,41.542,35.373
-		C274.854,713.096,254.87,725.461,231.23,725.461z M232.333,665.196h-28.062v48.839h27.447c18.386,0,30.441-9.823,30.441-24.577
-		C262.163,673.444,250.28,665.196,232.333,665.196z"/>
-	<path fill="#41281B" d="M388.377,717.193c0,32.363-18.535,49.174-46.3,49.174c-27.424,0-46.134-16.811-46.134-48.208v-64.543
-		h12.518v63.752c0,23.939,12.693,37.425,33.954,37.425c20.453,0,33.462-12.378,33.462-36.634v-64.543h12.5V717.193z"/>
-	<path fill="#41281B" d="M500.202,681.379c0,14.584-8.885,22.034-17.602,25.85c13.163,3.95,23.771,11.561,23.771,26.958
-		c0,19.172-16.014,30.451-40.269,30.451h-50.112V653.616h48.048C485.92,653.616,500.202,664.392,500.202,681.379z M487.51,683.122
-		c0-11.112-8.739-18.24-24.573-18.24h-34.59v38.229h33.63C477.033,703.104,487.51,696.277,487.51,683.122z M493.698,733.388
-		c0-12.218-10.162-19.192-29.513-19.192h-35.839v39.163h38.076C483.238,753.358,493.698,745.921,493.698,733.388z"/>
-</g>
-<path fill="#86B918" d="M298.549,467.913L129.228,298.579L298.549,129.27l56.446,56.435L242.104,298.579l56.441,56.443
-	l169.323-169.308L320.366,38.217c-12.043-12.055-31.579-12.055-43.634,0L38.169,276.781c-12.044,12.043-12.044,31.58,0,43.633
-	l238.563,238.557c12.055,12.05,31.591,12.05,43.634,0l238.565-238.557c12.044-12.053,12.044-31.59,0-43.633l-34.631-34.622
-	L298.549,467.913z"/>
-</svg>
diff --git a/doc/conf.py b/doc/conf.py
index 235a6967f50..8baac3fcc41 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -181,21 +181,7 @@ rst_prolog = """\
 .. _`salt-users`: https://groups.google.com/forum/#!forum/salt-users
 .. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce
 .. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers
-.. _`salt-slack`: https://via.vmw.com/salt-slack
-.. |windownload| raw:: html
-
-     <p>Python3 x86: <a
-     href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-x86-Setup.exe"><strong>Salt-Minion-{release}-x86-Setup.exe</strong></a>
-      | <a href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-x86-Setup.exe.md5"><strong>md5</strong></a></p>
-
-     <p>Python3 AMD64: <a
-     href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe"><strong>Salt-Minion-{release}-AMD64-Setup.exe</strong></a>
-      | <a href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe.md5"><strong>md5</strong></a></p>
-
-.. |osxdownloadpy3| raw:: html
-
-     <p>x86_64: <a href="https://repo.saltproject.io/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
-      | <a href="https://repo.saltproject.io/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
+.. _`salt-discord`: https://discord.com/invite/J7b7EscrAs
 
 """.format(
     release=stripped_release
@@ -383,19 +369,6 @@ man_pages = [
 ]
 
 
-### epub options
-epub_title = "Salt Documentation"
-epub_author = "VMware, Inc."
-epub_publisher = epub_author
-epub_copyright = copyright
-
-epub_scheme = "URL"
-epub_identifier = "http://saltproject.io/"
-
-epub_tocdup = False
-# epub_tocdepth = 3
-
-
 def skip_mod_init_member(app, what, name, obj, skip, options):
     # pylint: disable=too-many-arguments,unused-argument
     if name.startswith("_"):
diff --git a/doc/faq.rst b/doc/faq.rst
index 25fef77f9ab..5ae6e55287d 100644
--- a/doc/faq.rst
+++ b/doc/faq.rst
@@ -30,7 +30,7 @@ SaltStack the company does make proprietary products which use Salt and its libr
 I think I found a bug! What should I do?
 ----------------------------------------
 
-The salt-users mailing list as well as the salt IRC channel can both be helpful
+The salt-users mailing list as well as the Community Discord can both be helpful
 resources to confirm if others are seeing the issue and to assist with
 immediate debugging.
 
diff --git a/doc/ref/configuration/delta_proxy.rst b/doc/ref/configuration/delta_proxy.rst
index be1831da399..44a9fb56816 100644
--- a/doc/ref/configuration/delta_proxy.rst
+++ b/doc/ref/configuration/delta_proxy.rst
@@ -146,11 +146,8 @@ Before installing the delta proxy minion, ensure that:
 Install or upgrade Salt
 -----------------------
 Ensure your Salt masters are running at least Salt version 3004. For instructions
-on installing or upgrading Salt, see `repo.saltproject.io
-<http://repo.saltproject.io/>`_. For RedHat systems, see `Install or Upgrade Salt
-<https://enterprise.saltproject.io/en/latest/docs/install-salt.html>`_.
-
-
+on installing or upgrading Salt, see the
+`Salt Install Guide <https://docs.saltproject.io/salt/install-guide/en/latest/>`_.
 
 .. _delta-proxy-install:
 
diff --git a/doc/ref/modules/index.rst b/doc/ref/modules/index.rst
index 3231368c036..8e84861461c 100644
--- a/doc/ref/modules/index.rst
+++ b/doc/ref/modules/index.rst
@@ -539,8 +539,6 @@ the module using the following field lists:
 
 .. code-block:: text
 
-    :maintainer:    Thomas Hatch <thatch@saltstack.com, Seth House <shouse@saltstack.com>
-    :maturity:      new
     :depends:       python-mysqldb
     :platform:      all
 
diff --git a/doc/ref/states/requisites.rst b/doc/ref/states/requisites.rst
index 18625faf951..c879e85f910 100644
--- a/doc/ref/states/requisites.rst
+++ b/doc/ref/states/requisites.rst
@@ -563,7 +563,7 @@ The ``onfail`` requisite is applied in the same way as ``require`` and ``watch``
     notify-build_failure:
       hipchat.send_message:
         - room_id: 123456
-        - message: "Building website fail on {{ salt.grains.get('id') }}"
+        - message: "Building website fail on {{ grains['id'] }}"
 
 
 The default behavior of the ``onfail`` when multiple requisites are listed is
@@ -723,7 +723,7 @@ be installed. Thus allowing for a requisite to be defined "after the fact".
 
 .. code-block:: sls
 
-    {% for cfile in salt.pillar.get('nginx:config_files') %}
+    {% for cfile in salt['pillar.get']('nginx:config_files') %}
     /etc/nginx/conf.d/{{ cfile }}:
       file.managed:
         - source: salt://nginx/configs/{{ cfile }}
diff --git a/doc/security/index.rst b/doc/security/index.rst
index 13a6b71f665..408d9df4079 100644
--- a/doc/security/index.rst
+++ b/doc/security/index.rst
@@ -123,4 +123,4 @@ In addition to the mailing lists, SaltStack also provides the following resource
 
 * `SaltStack Security Announcements <https://www.saltstack.com/security-announcements/>`__ landing page
 * `SaltStack Security RSS Feed <http://www.saltstack.com/feed/?post_type=security>`__
-* `SaltStack Community Slack Workspace <http://saltstackcommunity.slack.com/>`__
+* `Salt Project Discord Community <https://discord.com/invite/J7b7EscrAs>`__
diff --git a/doc/topics/best_practices.rst b/doc/topics/best_practices.rst
index 379d4fdafe5..154cc2ad58c 100644
--- a/doc/topics/best_practices.rst
+++ b/doc/topics/best_practices.rst
@@ -21,6 +21,13 @@ General rules
 4. Store sensitive data in pillar.
 5. Don't use grains for matching in your pillar top file for any sensitive
    pillars.
+6. When accessing execution modules from within a template, use the mapping
+   key syntax instead of the attribute syntax to avoid edge cases. Example:
+
+   .. code-block:: jinja
+
+       {%- set do_this = salt['pillar.get']('foo:bar') %}
+       {%- set avoid_this = salt.pillar.get('foo:bar') %}
 
    .. include:: ../_incl/grains_passwords.rst
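
A brief rationale for rule 6, with a hypothetical illustration (the collision
on ``items`` below is the kind of edge case the rule guards against): the
``salt`` object is dictionary-like, so attribute-style access can collide with
built-in ``dict`` method names, while the mapping key syntax always resolves
to the execution function.

.. code-block:: jinja

    {# Always resolves to the execution function pillar.items #}
    {% set everything = salt['pillar.items']() %}

    {# Attribute access may hit the dict method .items() instead #}
    {% set maybe_wrong = salt.pillar.items() %}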
 
diff --git a/doc/topics/cloud/windows.rst b/doc/topics/cloud/windows.rst
index 983e62cb0d2..c136a32cfe1 100644
--- a/doc/topics/cloud/windows.rst
+++ b/doc/topics/cloud/windows.rst
@@ -30,6 +30,21 @@ which Salt Cloud is running. See
 `Windows - Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/topics/install-by-operating-system/windows.html>`_ for information about downloading
 and using the Salt Minion Windows installer.
 
+Optionally, WinRM can be used instead of ``winexe`` if the Python module ``pywinrm``
+is available and WinRM is supported on the target Windows version. Information
+on pywinrm can be found at the project home:
+
+* `pywinrm project home`__
+
+.. __: https://github.com/diyan/pywinrm
+
+Additionally, a copy of the Salt Minion Windows installer must be present on
+the system on which Salt Cloud is running. This installer may be downloaded
+from the Salt Project download area:
+
+* `Salt Project Download Area`__
+
+.. __: https://packages.broadcom.com/artifactory/saltproject-generic/windows/
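
As a sketch of how this fits together (the profile and provider names here are
hypothetical), a cloud profile can opt into WinRM with the ``use_winrm``
setting described later in this page:

.. code-block:: yaml

    ec2-win2019:
      provider: my-ec2-config
      win_username: Administrator
      win_password: auto
      use_winrm: True
      winrm_port: 5986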
 
 .. _new-pywinrm:
 
diff --git a/doc/topics/development/conventions/formulas.rst b/doc/topics/development/conventions/formulas.rst
index ad9033b2d10..f5a8c8f1115 100644
--- a/doc/topics/development/conventions/formulas.rst
+++ b/doc/topics/development/conventions/formulas.rst
@@ -221,14 +221,10 @@ The best way to create new Formula repositories for now is to create a
 repository in your own account on GitHub and notify a SaltStack employee when
 it is ready. We will add you to the Contributors team on the
 `saltstack-formulas`_ organization and help you transfer the repository over.
-Ping a SaltStack employee on IRC (`#salt`_ on LiberaChat), join the
-``#formulas`` channel on the `salt-slack`_ (bridged to ``#saltstack-formulas``
-on LiberaChat) or send an email to the `salt-users`_ mailing list.  Note that
-IRC logs are available at http://ngxbot.nginx.org/logs/%23salt/ and archives
-for FreeNode (up to mid-June 2021) https://logbot-archive.s3.amazonaws.com/freenode/salt.gz
-and https://logbot-archive.s3.amazonaws.com/freenode/saltstack-formulas.gz.
+Join the ``#formulas`` channel on the `salt-discord`_
+or send an email to the `salt-users`_ mailing list.
 
-There are a lot of repositories in that organization! Team members can manage
+Team members can manage
 which repositories they are subscribed to on GitHub's watching page:
 https://github.com/watching.
 
@@ -246,7 +242,7 @@ your pull request has stayed open for more than a couple days feel free to
 "selfie-merge" your own pull request.
 
 .. _`at-mention`: https://help.github.com/en/github/writing-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams
-.. _`#salt`: https://web.libera.chat/#salt
+.. _`#salt`: https://discord.com/invite/J7b7EscrAs
 
 Style
 -----
@@ -262,7 +258,7 @@ file. This section contains several suggestions and examples.
     deploy_myapp:
       git.latest:
         - name: git@github.com/myco/myapp.git
-        - version: {{ salt.pillar.get('myapp:version', 'master') }}
+        - version: {{ salt['pillar.get']('myapp:version', 'master') }}
 
 Use a descriptive State ID
 ``````````````````````````
@@ -363,11 +359,11 @@ for commenting YAML code.
 
     # BAD EXAMPLE
     # The Jinja in this YAML comment is still executed!
-    # {% set apache_is_installed = 'apache' in salt.pkg.list_pkgs() %}
+    # {% set apache_is_installed = 'apache' in salt['pkg.list_pkgs']() %}
 
     # GOOD EXAMPLE
     # The Jinja in this Jinja comment will not be executed.
-    {# {% set apache_is_installed = 'apache' in salt.pkg.list_pkgs() %} #}
+    {# {% set apache_is_installed = 'apache' in salt['pkg.list_pkgs']() %} #}
 
 Easy on the Jinja!
 ------------------
@@ -427,7 +423,7 @@ Less common values are often found by running commands. For example:
 
 .. code-block:: jinja
 
-    {% set is_selinux_enabled = salt.cmd.run('sestatus') == '1' %}
+    {% set is_selinux_enabled = salt['cmd.run']('sestatus') == '1' %}
 
 This is usually best done with a variable assignment in order to separate the
 data from the state that will make use of the data.
@@ -442,7 +438,7 @@ from the Salt Master. For example:
 
 .. code-block:: jinja
 
-    {% set some_data = salt.pillar.get('some_data', {'sane default': True}) %}
+    {% set some_data = salt['pillar.get']('some_data', {'sane default': True}) %}
 
     {# or #}
 
@@ -478,7 +474,7 @@ Below is a simple example of a readable loop:
 
 .. code-block:: jinja
 
-    {% for user in salt.pillar.get('list_of_users', []) %}
+    {% for user in salt['pillar.get']('list_of_users', []) %}
 
     {# Ensure unique state IDs when looping. #}
     {{ user.name }}-{{ loop.index }}:
@@ -690,7 +686,7 @@ Macros are useful for creating reusable, parameterized states. For example:
         - groups: {{ groups | json() }}
     {% endmacro %}
 
-    {% for user_info in salt.pillar.get('my_users', []) %}
+    {% for user_info in salt['pillar.get']('my_users', []) %}
     {{ user_state('user_number_' ~ loop.index, **user_info) }}
     {% endfor %}
 
@@ -708,7 +704,7 @@ example, the following macro could be used to write a php.ini config file:
         - source: salt://php.ini.tmpl
         - template: jinja
         - context:
-            php_ini_settings: {{ salt.pillar.get('php_ini', {}) | json() }}
+            php_ini_settings: {{ salt['pillar.get']('php_ini', {}) | json() }}
 
 ``/srv/pillar/php.sls``:
 
@@ -920,7 +916,7 @@ Pillar can also be used.
 .. code-block:: jinja
 
     {% set lookup_table = {...} %}
-    {% do lookup_table.update(salt.pillar.get('my:custom:data')) %}
+    {% do lookup_table.update(salt['pillar.get']('my:custom:data')) %}
 
 When to use lookup tables
 `````````````````````````
@@ -994,7 +990,7 @@ XML.)
 .. code-block:: jinja
 
     {% import_yaml 'tomcat/defaults.yaml' as server_xml_defaults %}
-    {% set server_xml_final_values = salt.pillar.get(
+    {% set server_xml_final_values = salt['pillar.get'](
         'appX:server_xml_overrides',
         default=server_xml_defaults,
         merge=True)
@@ -1033,11 +1029,11 @@ example:
 
     {# Extract the relevant subset for the app configured on the current
        machine (configured via a grain in this example). #}
-    {% app = app_defaults.get(salt.grains.get('role')) %}
+    {% app = app_defaults.get(salt['grains.get']('role')) %}
 
     {# Allow values from Pillar to (optionally) update values from the lookup
        table. #}
-    {% do app_defaults.update(salt.pillar.get('myapp', {})) %}
+    {% do app_defaults.update(salt['pillar.get']('myapp', {})) %}
 
     deploy_application:
       git.latest:
diff --git a/doc/topics/development/conventions/release.rst b/doc/topics/development/conventions/release.rst
index c08447e2007..1cf05c8ff1f 100644
--- a/doc/topics/development/conventions/release.rst
+++ b/doc/topics/development/conventions/release.rst
@@ -46,7 +46,7 @@ example):
 #. Publish the docs.
 #. Create release at `github`_
 #. Update win-repo-ng with new salt versions.
-#. Announce release is live to irc, salt-users, salt-announce and release slack
+#. Announce release is live to IRC, salt-users, salt-announce and the release Discord
    community channel.
 
 
@@ -79,7 +79,7 @@ for a bugfix release.
 #. Publish the docs.
 #. Create release at `github`_
 #. Update win-repo-ng with new salt versions.
+#. Announce release is live to IRC, salt-users, salt-announce and the release Discord channel.
+#. Announce release is live to irc, salt-users, salt-announce and release discord channel.
 
 .. _`github`: https://github.com/saltstack/salt/releases
 .. _`repo.saltproject.io`: https://repo.saltproject.io
diff --git a/doc/topics/development/conventions/style.rst b/doc/topics/development/conventions/style.rst
index 1772d6209ca..30d814d47b2 100644
--- a/doc/topics/development/conventions/style.rst
+++ b/doc/topics/development/conventions/style.rst
@@ -97,7 +97,7 @@ When adding a new function or state, where possible try to use a
         print(msg)
 
 If you are uncertain what version should be used, either consult a core
-developer in IRC or bring this up when opening your :ref:`pull request
+developer in the Community Discord or bring this up when opening your :ref:`pull request
 <installing-for-development>` and a core developer will let you know what
 version to add. Typically this will be the next element in the `periodic table
 <https://en.wikipedia.org/wiki/List_of_chemical_elements>`_.
diff --git a/doc/topics/development/pull_requests.rst b/doc/topics/development/pull_requests.rst
index 4b6ffee9135..c1cb30a035c 100644
--- a/doc/topics/development/pull_requests.rst
+++ b/doc/topics/development/pull_requests.rst
@@ -193,12 +193,21 @@ By default, PRs run a limited subset of the test suite against the following
 operating systems:
 
 * Linux:
+    - Latest ``Amazon Linux Arm64``
+    - Latest ``Amazon Linux x86_64``
+    - Latest ``Debian Linux Arm64``
+    - Latest ``Debian Linux x86_64``
+    - Latest ``Photon OS Arm64``
+    - Latest ``Photon OS x86_64``
+    - Latest ``Rocky Linux Arm64``
     - Latest ``Rocky Linux x86_64``
-    - Latest ``Amazon Linux aarch64``
-    - Latest ``Ubuntu LTS arm64``
-    - Latest ``Arch Linux x86_64``
-* Latest ``Windows Server x86_64``
-* Latest ``MacOS arm64``
+    - Latest ``Ubuntu LTS Arm64``
+    - Latest ``Ubuntu LTS x86_64``
+* Windows Server:
+    - Latest ``Windows Server x86_64``
+* macOS:
+    - Latest ``MacOS Arm64``
+    - Latest ``MacOS x86_64``
 
 Optional OS additions
 ---------------------
diff --git a/doc/topics/jinja/index.rst b/doc/topics/jinja/index.rst
index 35fae263db7..5cf422c3ef4 100644
--- a/doc/topics/jinja/index.rst
+++ b/doc/topics/jinja/index.rst
@@ -161,7 +161,7 @@ starts at the root of the state tree or pillar.
 Errors
 ======
 
-Saltstack allows raising custom errors using the ``raise`` jinja function.
+Saltstack allows raising custom errors using the ``raise`` Jinja function.
 
 .. code-block:: jinja
 
@@ -174,8 +174,8 @@ exception is raised, causing the rendering to fail with the following message:
 
     TemplateError: Custom Error
 
-Filters
-=======
+Custom Filters
+==============
 
 Saltstack extends `builtin filters`_ with these custom filters:
 
@@ -405,8 +405,9 @@ This text will be wrapped in quotes.
 
 .. versionadded:: 2017.7.0
 
-Scan through string looking for a location where this regular expression
-produces a match. Returns ``None`` in case there were no matches found
+Looks for a match for the specified regex anywhere in the string. If the string
+does not match the regex, this filter returns ``None``. If the string *does*
+match the regex, then the `capture groups`_ for the regex will be returned.
 
 Example:
 
@@ -420,6 +421,29 @@ Returns:
 
   ("defabcdef",)
 
+If the regex does not contain a capture group, there are zero capture groups
+to return, so a successful match yields an empty tuple. This means that the
+following ``if`` statement would evaluate as ``False`` even on a match:
+
+.. code-block:: jinja
+
+  {%- if 'foobar' | regex_search('foo') %}
+
+If you do not need a capture group and are just looking to test if a string
+matches a regex, then you should check to see if the filter returns ``None``:
+
+.. code-block:: jinja
+
+  {%- if (some_var | regex_search('foo')) is not none %}
+
+.. note::
+
+   In a Jinja statement, a null value (i.e. a Python ``None``) should be
+   expressed as ``none`` (lowercase). More information can be found in the
+   **Note** section of the `jinja docs`_.
+
+.. _`capture groups`: https://docs.python.org/3/library/re.html#re.Match.groups
+.. _`jinja docs`: https://jinja.palletsprojects.com/en/stable/templates/#literals
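
Putting these pieces together, a short sketch of extracting a capture group
safely (the hostname pattern is illustrative only):

.. code-block:: jinja

    {%- set match = 'db01.example.com' | regex_search('^([^.]+)\.') %}
    {%- if match is not none %}
    {#- match is the tuple of capture groups, e.g. ('db01',) #}
    {%- set shortname = match[0] %}
    {%- endif %}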
 
 .. jinja_ref:: regex_match
 
@@ -428,8 +452,8 @@ Returns:
 
 .. versionadded:: 2017.7.0
 
-If zero or more characters at the beginning of string match this regular
-expression, otherwise returns ``None``.
+Works exactly like :jinja_ref:`regex_search`, but only checks for matches at
+the *beginning* of the string passed into this filter.
 
 Example:
 
@@ -2506,7 +2530,8 @@ dictionary of :term:`execution function <Execution Function>`.
 
 .. code-block:: jinja
 
-    # The following two function calls are equivalent.
+    # The following two function calls are mostly equivalent,
+    # but the first style should be preferred to avoid edge cases.
     {{ salt['cmd.run']('whoami') }}
     {{ salt.cmd.run('whoami') }}
 
@@ -2536,7 +2561,7 @@ For example, making the call:
 
 .. code-block:: jinja
 
-    {%- do salt.log.error('testing jinja logging') -%}
+    {%- do salt['log.error']('testing jinja logging') -%}
 
 Will insert the following message in the minion logs:
 
@@ -2552,14 +2577,14 @@ Profiling
 .. versionadded:: 3002
 
 When working with a very large codebase, it becomes increasingly imperative to
-trace inefficiencies with state and pillar render times.  The `profile` jinja
+trace inefficiencies with state and pillar render times. The ``profile`` Jinja
 block enables the user to get finely detailed information on the most expensive
 areas in the codebase.
 
 Profiling blocks
 ----------------
 
-Any block of jinja code can be wrapped in a ``profile`` block.  The syntax for
+Any block of Jinja code can be wrapped in a ``profile`` block.  The syntax for
 a profile block is ``{% profile as '<name>' %}<jinja code>{% endprofile %}``,
 where ``<name>`` can be any string.  The ``<name>`` token will appear in the
 log at the ``profile`` level along with the render time of the block.
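
Following that syntax, an expensive lookup could be timed like this (the block
name and pillar key are illustrative):

.. code-block:: jinja

    {% profile as 'load application defaults' %}
    {% set app_defaults = salt['pillar.get']('myapp:defaults', {}) %}
    {% endprofile %}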
@@ -2626,15 +2651,15 @@ For ``import_*`` blocks, the ``profile`` log statement has the following form:
     [...]
 
 Python Methods
-====================
+==============
 
-A powerful feature of jinja that is only hinted at in the official jinja
-documentation is that you can use the native python methods of the
-variable type. Here is the python documentation for `string methods`_.
+A powerful feature of Jinja that is only hinted at in the official Jinja
+documentation is that you can use the native Python methods of the
+variable type. Here is the Python documentation for `string methods`_.
 
 .. code-block:: jinja
 
-  {% set hostname,domain = grains.id.partition('.')[::2] %}{{ hostname }}
+  {% set hostname, domain = grains.id.partition('.')[::2] %}{{ hostname }}
 
 .. code-block:: jinja
 
@@ -2681,7 +2706,7 @@ module, say ``my_filters`` and use as:
 
 .. code-block:: jinja
 
-    {{ salt.my_filters.my_jinja_filter(my_variable) }}
+    {{ salt['my_filters.my_jinja_filter'](my_variable) }}
 
 The greatest benefit is that you are able to access thousands of existing functions, e.g.:
 
@@ -2689,16 +2714,16 @@ The greatest benefit is that you are able to access thousands of existing functi
 
   .. code-block:: jinja
 
-    {{ salt.dnsutil.AAAA('www.google.com') }}
+    {{ salt['dnsutil.AAAA']('www.google.com') }}
 
 - retrieve a specific field value from a :mod:`Redis <salt.modules.modredis>` hash:
 
   .. code-block:: jinja
 
-    {{ salt.redis.hget('foo_hash', 'bar_field') }}
+    {{ salt['redis.hget']('foo_hash', 'bar_field') }}
 
 - get the routes to ``0.0.0.0/0`` using the :mod:`NAPALM route <salt.modules.napalm_route>`:
 
   .. code-block:: jinja
 
-    {{ salt.route.show('0.0.0.0/0') }}
+    {{ salt['route.show']('0.0.0.0/0') }}
diff --git a/doc/topics/reactor/index.rst b/doc/topics/reactor/index.rst
index 7cdadff29cd..779a82b0e6e 100644
--- a/doc/topics/reactor/index.rst
+++ b/doc/topics/reactor/index.rst
@@ -394,8 +394,8 @@ For example:
 .. code-block:: jinja
 
     # /srv/salt/orchestrate/do_complex_thing.sls
-    {% set tag = salt.pillar.get('event_tag') %}
-    {% set data = salt.pillar.get('event_data') %}
+    {% set tag = salt['pillar.get']('event_tag') %}
+    {% set data = salt['pillar.get']('event_data') %}
 
     # Pass data from the event to a custom runner function.
     # The function expects a 'foo' argument.
diff --git a/doc/topics/releases/3006.9.md b/doc/topics/releases/3006.9.md
new file mode 100644
index 00000000000..5f5d64051d5
--- /dev/null
+++ b/doc/topics/releases/3006.9.md
@@ -0,0 +1,87 @@
+(release-3006.9)=
+# Salt 3006.9 release notes
+
+<!---
+Do not edit this file. This is auto generated.
+Edit the templates in doc/topics/releases/templates/
+for a given release.
+-->
+
+
+<!--
+Add release specific details below
+-->
+
+<!--
+Do not edit the changelog below.
+This is auto generated.
+-->
+## Changelog
+
+### Deprecated
+
+- Drop CentOS 7 support [#66623](https://github.com/saltstack/salt/issues/66623)
+- No longer build RPM packages with CentOS Stream 9 [#66624](https://github.com/saltstack/salt/issues/66624)
+
+
+### Fixed
+
+- Made slsutil.renderer work with salt-ssh [#50196](https://github.com/saltstack/salt/issues/50196)
+- Fixed defaults.merge is not available when using salt-ssh [#51605](https://github.com/saltstack/salt/issues/51605)
+- Fixed config.get does not support merge option with salt-ssh [#56441](https://github.com/saltstack/salt/issues/56441)
+- Update to include croniter in pkg requirements [#57649](https://github.com/saltstack/salt/issues/57649)
+- Fixed state.test does not work with salt-ssh [#61100](https://github.com/saltstack/salt/issues/61100)
+- Made slsutil.findup work with salt-ssh [#61143](https://github.com/saltstack/salt/issues/61143)
+- Fixes multiple issues with the cmd module on Windows. Scripts are called using
+  the ``-File`` parameter to the ``powershell.exe`` binary. ``CLIXML`` data in
+  stderr is now removed (only applies to encoded commands). Commands can now be
+  sent to ``cmd.powershell`` as a list. Makes sure JSON data returned is valid.
+  Strips whitespace from the return when using ``runas``. [#61166](https://github.com/saltstack/salt/issues/61166)
+- Fixed the win_lgpo_netsh salt util to handle non-English systems. This was a
+  rewrite to use PowerShell instead of netsh to make the changes on the system [#61534](https://github.com/saltstack/salt/issues/61534)
+- file.replace and file.search work properly with /proc files [#63102](https://github.com/saltstack/salt/issues/63102)
+- Fix utf8 handling in 'pass' renderer [#64300](https://github.com/saltstack/salt/issues/64300)
+- Fixed incorrect "version argument will be ignored for multiple package targets" warning when using the pkgs argument to the yumpkg module. [#64563](https://github.com/saltstack/salt/issues/64563)
+- salt-cloud honors root_dir config setting for log_file location and fixes for root_dir locations on windows. [#64728](https://github.com/saltstack/salt/issues/64728)
+- Fixed slsutil.update with salt-ssh during template rendering [#65067](https://github.com/saltstack/salt/issues/65067)
+- Fix config.items when called on minion [#65251](https://github.com/saltstack/salt/issues/65251)
+- Ensure that on rpm and deb systems, the user and group of an existing Salt install are maintained on upgrade [#65264](https://github.com/saltstack/salt/issues/65264)
+- Fix typo in nftables module to ensure unique nft family values [#65295](https://github.com/saltstack/salt/issues/65295)
+- pkg.installed state aggregate does not honor the requires requisite [#65304](https://github.com/saltstack/salt/issues/65304)
+- Added SSH wrapper for logmod [#65630](https://github.com/saltstack/salt/issues/65630)
+- Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM [#65816](https://github.com/saltstack/salt/issues/65816)
+- Corrected x509_v2 CRL creation `last_update` and `next_update` values when system timezone is not UTC [#65837](https://github.com/saltstack/salt/issues/65837)
+- Make sure the root minion process handles SIGUSR1 and emits a traceback like its child processes [#66095](https://github.com/saltstack/salt/issues/66095)
+- Replaced pyvenv with builtin venv for virtualenv_mod [#66132](https://github.com/saltstack/salt/issues/66132)
+- Made `file.managed` skip download of a remote source if the managed file already exists with the correct hash [#66342](https://github.com/saltstack/salt/issues/66342)
+- Fix win_task ExecutionTimeLimit and result/error code interpretation [#66347](https://github.com/saltstack/salt/issues/66347), [#66441](https://github.com/saltstack/salt/issues/66441)
+- Fixed nftables.build_rule breaks ipv6 rules by using the wrong syntax for source and destination addresses [#66382](https://github.com/saltstack/salt/issues/66382)
+- Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key [#66414](https://github.com/saltstack/salt/issues/66414)
+- Fixed parallel state execution with Salt-SSH [#66514](https://github.com/saltstack/salt/issues/66514)
+- Fix support for FIPS approved encryption and signing algorithms. [#66579](https://github.com/saltstack/salt/issues/66579)
+- Fix relative file_roots paths [#66588](https://github.com/saltstack/salt/issues/66588)
+- Fixed an issue with cmd.run with requirements when the shell is not the
+  default [#66596](https://github.com/saltstack/salt/issues/66596)
+- Fix RPM package provides [#66604](https://github.com/saltstack/salt/issues/66604)
+- Upgrade relenv to 0.16.1. This release fixes several package installs for salt-pip [#66632](https://github.com/saltstack/salt/issues/66632)
+- Upgrade relenv to 0.17.0 (https://github.com/saltstack/relenv/blob/v0.17.0/CHANGELOG.md) [#66663](https://github.com/saltstack/salt/issues/66663)
+- Upgrade dependencies due to security issues:
+  - pymysql>=1.1.1
+  - requests>=2.32.0
+  - docker>=7.1.0 [#66666](https://github.com/saltstack/salt/issues/66666)
+- Corrected missed line in branch 3006.x when backporting from PR 61620 and 65044 [#66683](https://github.com/saltstack/salt/issues/66683)
+- Remove debug output from shell scripts for packaging [#66747](https://github.com/saltstack/salt/issues/66747)
+
+
+### Added
+
+- Add Ubuntu 24.04 support [#66180](https://github.com/saltstack/salt/issues/66180)
+- Add Fedora 40 support, replacing Fedora 39 [#66300](https://github.com/saltstack/salt/issues/66300)
+- Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9) [#66624](https://github.com/saltstack/salt/issues/66624)
+
+
+### Security
+
+- Bump to ``jinja2==3.1.4`` due to https://github.com/advisories/GHSA-h75v-3vvj-5mfj [#66488](https://github.com/saltstack/salt/issues/66488)
+- CVE-2024-37088 salt-call will fail with exit code 1 if bad pillar data is
+  encountered. [#66702](https://github.com/saltstack/salt/issues/66702)
diff --git a/doc/topics/releases/templates/3006.9.md.template b/doc/topics/releases/templates/3006.9.md.template
new file mode 100644
index 00000000000..6bcb03dd59c
--- /dev/null
+++ b/doc/topics/releases/templates/3006.9.md.template
@@ -0,0 +1,14 @@
+(release-3006.9)=
+# Salt 3006.9 release notes{{ unreleased }}
+{{ warning }}
+
+<!--
+Add release specific details below
+-->
+
+<!--
+Do not edit the changelog below.
+This is auto generated.
+-->
+## Changelog
+{{ changelog }}
diff --git a/doc/topics/tutorials/firewall.rst b/doc/topics/tutorials/firewall.rst
index d7b199e3136..e91b4db8121 100644
--- a/doc/topics/tutorials/firewall.rst
+++ b/doc/topics/tutorials/firewall.rst
@@ -176,7 +176,7 @@ to allow traffic on ``tcp/4505`` and ``tcp/4506``:
 **Ubuntu**
 
 Salt installs firewall rules in :blob:`/etc/ufw/applications.d/salt.ufw
-<pkg/salt.ufw>`. Enable with:
+<pkg/common/salt.ufw>`. Enable with:
 
 .. code-block:: bash
 
diff --git a/doc/topics/tutorials/jinja_to_execution_module.rst b/doc/topics/tutorials/jinja_to_execution_module.rst
index 25fa66ad89c..7bae33f948b 100644
--- a/doc/topics/tutorials/jinja_to_execution_module.rst
+++ b/doc/topics/tutorials/jinja_to_execution_module.rst
@@ -52,7 +52,7 @@ Unfortunately, it can lead to code that looks like the following.
     {%   do storage.update({'server_ip': servers_list[server_index]}) %}
     {% endif %}
 
-    {% for network, _ in salt.pillar.get('inventory:networks', {}) | dictsort %}
+    {% for network, _ in salt['pillar.get']('inventory:networks', {}) | dictsort %}
     {%   do storage.ipsets.hash_net.foo_networks.append(network) %}
     {% endfor %}
 
@@ -88,7 +88,7 @@ Let's move that to an execution module.
 
     {% do storage.update({'server_ip': salt['storage.ip']()}) %}
 
-    {% for network, _ in salt.pillar.get('inventory:networks', {}) | dictsort %}
+    {% for network, _ in salt['pillar.get']('inventory:networks', {}) | dictsort %}
     {%   do storage.ipsets.hash_net.af_networks.append(network) %}
     {% endfor %}
 
diff --git a/doc/topics/tutorials/master-cluster.rst b/doc/topics/tutorials/master-cluster.rst
index 9f693981bc3..7b7ae25f11d 100644
--- a/doc/topics/tutorials/master-cluster.rst
+++ b/doc/topics/tutorials/master-cluster.rst
@@ -45,18 +45,20 @@ HAProxy:
             mode tcp
             bind 10.27.5.116:4505
             option tcplog
-            timeout client  1m
+            # This timeout is equal to the publish_session setting of the
+            # masters.
+            timeout client 86400s
             default_backend salt-master-pub-backend
 
         backend salt-master-pub-backend
             mode tcp
-            option tcplog
             #option log-health-checks
             log global
-            #balance source
             balance roundrobin
             timeout connect 10s
-            timeout server 1m
+            # This timeout is equal to the publish_session setting of the
+            # masters.
+            timeout server 86400s
             server rserve1 10.27.12.13:4505 check
             server rserve2 10.27.7.126:4505 check
             server rserve3 10.27.3.73:4505 check
@@ -70,11 +72,8 @@ HAProxy:
 
         backend salt-master-req-backend
             mode tcp
-            option tcplog
-            #option log-health-checks
             log global
             balance roundrobin
-            #balance source
             timeout connect 10s
             timeout server 1m
             server rserve1 10.27.12.13:4506 check
@@ -93,6 +92,8 @@ Master Config:
         cluster_pki_dir: /my/gluster/share/pki
         cachedir: /my/gluster/share/cache
         file_roots:
+          base:
             - /my/gluster/share/srv/salt
         pillar_roots:
+          base:
             - /my/gluster/share/srv/pillar
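
The 86400-second HAProxy timeouts above are intended to match the masters'
``publish_session`` setting (86400 seconds by default); if one value changes,
the other should change with it. A sketch of the corresponding master option:

.. code-block:: yaml

    # /etc/salt/master on each cluster master
    publish_session: 86400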
diff --git a/doc/topics/tutorials/states_pt4.rst b/doc/topics/tutorials/states_pt4.rst
index 96701b20af3..943f3079f11 100644
--- a/doc/topics/tutorials/states_pt4.rst
+++ b/doc/topics/tutorials/states_pt4.rst
@@ -211,7 +211,7 @@ can be found on GitHub in the `saltstack-formulas`_ collection of repositories.
 If you have any questions, suggestions, or just want to chat with other people
 who are using Salt, we have a very active community and we'd love to hear from
 you. One of the best places to talk to the community is on the
-`Salt Project Slack workspace <https://saltstackcommunity.slack.com/>`_.
+`Salt Project Discord Community <https://discord.com/invite/J7b7EscrAs>`_.
 
 In addition, by continuing to the :ref:`Orchestrate Runner <orchestrate-runner>` docs,
 you can learn about the powerful orchestration of which Salt is capable.
diff --git a/noxfile.py b/noxfile.py
index fa7f95fe866..70e2ff5a63c 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -1284,7 +1284,10 @@ def decompress_dependencies(session):
                 if not os.path.isabs(resolved_link):
                     # Relative symlinks, resolve them
                     resolved_link = os.path.join(scan_path, resolved_link)
-                if not os.path.exists(resolved_link):
+                prefix_check = False
+                if platform == "windows":
+                    prefix_check = resolved_link.startswith("\\\\?")  # extended-length path prefix ("\\?\...")
+                if not os.path.exists(resolved_link) or prefix_check:
                     session.log("The symlink %r looks to be broken", resolved_link)
                     # This is a broken link, fix it
                     resolved_link_suffix = resolved_link.split(
@@ -1839,13 +1842,24 @@ def ci_test_onedir_pkgs(session):
     session_warn(session, "Replacing VirtualEnv instance...")
 
     ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir"
-    session._runner.venv = VirtualEnv(
-        str(ci_test_onedir_path.relative_to(REPO_ROOT)),
-        interpreter=session._runner.func.python,
-        reuse_existing=True,
-        venv=session._runner.venv.venv_or_virtualenv == "venv",
-        venv_params=session._runner.venv.venv_params,
-    )
+    if hasattr(session._runner.venv, "venv_or_virtualenv"):
+        venv = session._runner.venv.venv_or_virtualenv == "venv"
+        session._runner.venv = VirtualEnv(
+            str(ci_test_onedir_path.relative_to(REPO_ROOT)),
+            interpreter=session._runner.func.python,
+            reuse_existing=True,
+            venv=venv,
+            venv_params=session._runner.venv.venv_params,
+        )
+    else:
+        venv = session._runner.venv.venv_backend in ("venv", "virtualenv")
+        session._runner.venv = VirtualEnv(  # pylint: disable=unexpected-keyword-arg
+            str(ci_test_onedir_path.relative_to(REPO_ROOT)),
+            interpreter=session._runner.func.python,
+            reuse_existing=True,
+            venv_backend=session._runner.venv.venv_backend,
+            venv_params=session._runner.venv.venv_params,
+        )
     os.environ["VIRTUAL_ENV"] = session._runner.venv.location
     session._runner.venv.create()
 
@@ -1870,18 +1884,10 @@ def ci_test_onedir_pkgs(session):
             "--upgrade",
             "--no-uninstall",
         ],
-        "upgrade-classic": [
-            "--upgrade",
-            "--no-uninstall",
-        ],
         "downgrade": [
             "--downgrade",
             "--no-uninstall",
         ],
-        "downgrade-classic": [
-            "--downgrade",
-            "--no-uninstall",
-        ],
         "download-pkgs": [
             "--download-pkgs",
         ],
@@ -1912,9 +1918,6 @@ def ci_test_onedir_pkgs(session):
         "PKG_TEST_TYPE": chunk,
     }
 
-    if chunk in ("upgrade-classic", "downgrade-classic"):
-        cmd_args.append("--classic")
-
     pytest_args = (
         common_pytest_args[:]
         + cmd_args[:]
@@ -1969,12 +1972,11 @@ def ci_test_onedir_pkgs(session):
         )
 
     if chunk not in ("install", "download-pkgs"):
-        cmd_args = chunks["install"]
+        cmd_args = chunks[chunk]
         pytest_args = (
             common_pytest_args[:]
             + cmd_args[:]
             + [
-                "--no-install",
                 "--junitxml=artifacts/xml-unittests-output/test-results-install.xml",
                 "--log-file=artifacts/logs/runtests-install.log",
             ]
@@ -1982,8 +1984,6 @@ def ci_test_onedir_pkgs(session):
         )
         if "downgrade" in chunk:
             pytest_args.append("--use-prev-version")
-        if chunk in ("upgrade-classic", "downgrade-classic"):
-            pytest_args.append("--classic")
         if append_tests_path:
             pytest_args.append("tests/pytests/pkg/")
         try:
@@ -1992,12 +1992,11 @@ def ci_test_onedir_pkgs(session):
             if os.environ.get("RERUN_FAILURES", "0") == "0":
                 # Don't rerun on failures
                 return
-            cmd_args = chunks["install"]
+            cmd_args = chunks[chunk]
             pytest_args = (
                 common_pytest_args[:]
                 + cmd_args[:]
                 + [
-                    "--no-install",
                     "--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml",
                     "--log-file=artifacts/logs/runtests-install-rerun.log",
                     "--lf",
@@ -2006,8 +2005,6 @@ def ci_test_onedir_pkgs(session):
             )
             if "downgrade" in chunk:
                 pytest_args.append("--use-prev-version")
-            if chunk in ("upgrade-classic", "downgrade-classic"):
-                pytest_args.append("--classic")
             if append_tests_path:
                 pytest_args.append("tests/pytests/pkg/")
             _pytest(
diff --git a/pkg/debian/changelog b/pkg/debian/changelog
index d90880dfb95..0e8f39d4f45 100644
--- a/pkg/debian/changelog
+++ b/pkg/debian/changelog
@@ -42,6 +42,70 @@ salt (3007.1) stable; urgency=medium
 
  -- Salt Project Packaging <saltproject-packaging@vmware.com>  Sun, 19 May 2024 12:48:59 +0000
 
+salt (3006.9) stable; urgency=medium
+
+
+  # Deprecated
+
+  * Drop CentOS 7 support [#66623](https://github.com/saltstack/salt/issues/66623)
+  * No longer build RPM packages with CentOS Stream 9 [#66624](https://github.com/saltstack/salt/issues/66624)
+
+  # Fixed
+
+  * Made slsutil.renderer work with salt-ssh [#50196](https://github.com/saltstack/salt/issues/50196)
+  * Fixed defaults.merge is not available when using salt-ssh [#51605](https://github.com/saltstack/salt/issues/51605)
+  * Fixed config.get does not support merge option with salt-ssh [#56441](https://github.com/saltstack/salt/issues/56441)
+  * Update to include croniter in pkg requirements [#57649](https://github.com/saltstack/salt/issues/57649)
+  * Fixed state.test does not work with salt-ssh [#61100](https://github.com/saltstack/salt/issues/61100)
+  * Made slsutil.findup work with salt-ssh [#61143](https://github.com/saltstack/salt/issues/61143)
+  * file.replace and file.search work properly with /proc files [#63102](https://github.com/saltstack/salt/issues/63102)
+  * Fix utf8 handling in 'pass' renderer [#64300](https://github.com/saltstack/salt/issues/64300)
+  * Fixed incorrect "version argument will be ignored for multiple package targets" warning when using the pkgs argument to the yumpkg module. [#64563](https://github.com/saltstack/salt/issues/64563)
+  * salt-cloud honors root_dir config setting for log_file location and fixes for root_dir locations on windows. [#64728](https://github.com/saltstack/salt/issues/64728)
+  * Fixed slsutil.update with salt-ssh during template rendering [#65067](https://github.com/saltstack/salt/issues/65067)
+  * Fix config.items when called on minion [#65251](https://github.com/saltstack/salt/issues/65251)
+  * Ensure that on rpm and deb systems, the user and group of an existing Salt install are maintained on upgrade [#65264](https://github.com/saltstack/salt/issues/65264)
+  * Fix typo in nftables module to ensure unique nft family values [#65295](https://github.com/saltstack/salt/issues/65295)
+  * pkg.installed state aggregate does not honor the requires requisite [#65304](https://github.com/saltstack/salt/issues/65304)
+  * Added SSH wrapper for logmod [#65630](https://github.com/saltstack/salt/issues/65630)
+  * Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM [#65816](https://github.com/saltstack/salt/issues/65816)
+  * Corrected x509_v2 CRL creation `last_update` and `next_update` values when system timezone is not UTC [#65837](https://github.com/saltstack/salt/issues/65837)
+  * Make sure the root minion process handles SIGUSR1 and emits a traceback like its child processes [#66095](https://github.com/saltstack/salt/issues/66095)
+  * Replaced pyvenv with builtin venv for virtualenv_mod [#66132](https://github.com/saltstack/salt/issues/66132)
+  * Made `file.managed` skip download of a remote source if the managed file already exists with the correct hash [#66342](https://github.com/saltstack/salt/issues/66342)
+  * Fix win_task ExecutionTimeLimit and result/error code interpretation [#66347](https://github.com/saltstack/salt/issues/66347), [#66441](https://github.com/saltstack/salt/issues/66441)
+  * Fixed nftables.build_rule breaks ipv6 rules by using the wrong syntax for source and destination addresses [#66382](https://github.com/saltstack/salt/issues/66382)
+  * Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key [#66414](https://github.com/saltstack/salt/issues/66414)
+  * Fixed parallel state execution with Salt-SSH [#66514](https://github.com/saltstack/salt/issues/66514)
+  * Fix support for FIPS approved encryption and signing algorithms. [#66579](https://github.com/saltstack/salt/issues/66579)
+  * Fix relative file_roots paths [#66588](https://github.com/saltstack/salt/issues/66588)
+  * Fixed an issue with cmd.run with requirements when the shell is not the
+    default [#66596](https://github.com/saltstack/salt/issues/66596)
+  * Fix RPM package provides [#66604](https://github.com/saltstack/salt/issues/66604)
+  * Upgrade relenv to 0.16.1. This release fixes several package installs for salt-pip [#66632](https://github.com/saltstack/salt/issues/66632)
+  * Upgrade relenv to 0.17.0 (https://github.com/saltstack/relenv/blob/v0.17.0/CHANGELOG.md) [#66663](https://github.com/saltstack/salt/issues/66663)
+  * Upgrade dependencies due to security issues:
+    * pymysql>=1.1.1
+    * requests>=2.32.0
+    * docker>=7.1.0 [#66666](https://github.com/saltstack/salt/issues/66666)
+  * Corrected missed line in branch 3006.x when backporting from PR 61620 and 65044 [#66683](https://github.com/saltstack/salt/issues/66683)
+  * Remove debug output from shell scripts for packaging [#66747](https://github.com/saltstack/salt/issues/66747)
+
+  # Added
+
+  * Add Ubuntu 24.04 support [#66180](https://github.com/saltstack/salt/issues/66180)
+  * Add Fedora 40 support, replacing Fedora 39 [#66300](https://github.com/saltstack/salt/issues/66300)
+  * Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9) [#66624](https://github.com/saltstack/salt/issues/66624)
+
+  # Security
+
+  * Bump to ``jinja2==3.1.4`` due to https://github.com/advisories/GHSA-h75v-3vvj-5mfj [#66488](https://github.com/saltstack/salt/issues/66488)
+  * CVE-2024-37088 salt-call will fail with exit code 1 if bad pillar data is
+    encountered. [#66702](https://github.com/saltstack/salt/issues/66702)
+
+
+ -- Salt Project Packaging <saltproject-packaging@vmware.com>  Mon, 29 Jul 2024 07:42:36 +0000
+
 salt (3006.8) stable; urgency=medium
 
 
diff --git a/pkg/debian/rules b/pkg/debian/rules
index 40446c7b25f..9ebc51b9422 100755
--- a/pkg/debian/rules
+++ b/pkg/debian/rules
@@ -4,7 +4,7 @@ DH_VERBOSE = 1
 .PHONY: override_dh_strip
 
 %:
-	dh $@
+	dh $@ --with bash-completion,systemd
 
 # dh_auto_clean tries to invoke distutils causing failures.
 override_dh_auto_clean:
diff --git a/pkg/debian/salt-api.postinst b/pkg/debian/salt-api.postinst
index 9345d72bf2a..3b78211922a 100644
--- a/pkg/debian/salt-api.postinst
+++ b/pkg/debian/salt-api.postinst
@@ -1,10 +1,37 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
 case "$1" in
   configure)
-    if [ ! -e "/var/log/salt/api" ]; then
-      touch /var/log/salt/api
-      chmod 640 /var/log/salt/api
+    db_get salt-api/user
+    if [ "$RET" != "root" ]; then
+      if [ ! -e "/var/log/salt/api" ]; then
+        touch /var/log/salt/api
+        chmod 640 /var/log/salt/api
+      fi
+      chown $RET:$RET /var/log/salt/api
+    fi
+    if command -v systemctl; then
+        db_get salt-api/active
+        RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+        if [ "$RESLT" != 10 ]; then
+            systemctl daemon-reload
+            if [ "$RESLT" = "active" ]; then
+                systemctl restart salt-api
+            fi
+            db_get salt-api/enabled
+            RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+            if [ "$RESLT" = "disabled" ]; then
+                systemctl disable salt-api
+            else
+                systemctl enable salt-api
+            fi
+        else
+            systemctl daemon-reload
+            systemctl restart salt-api
+            systemctl enable salt-api
+        fi
     fi
-    chown salt:salt /var/log/salt/api
-    if command -v systemctl; then systemctl enable salt-api; fi
   ;;
 esac
diff --git a/pkg/debian/salt-api.preinst b/pkg/debian/salt-api.preinst
new file mode 100644
index 00000000000..c063108ea55
--- /dev/null
+++ b/pkg/debian/salt-api.preinst
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
+case "$1" in
+  upgrade)
+    [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
+    [ -z "$SALT_USER" ] && SALT_USER=salt
+    [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
+    [ -z "$SALT_GROUP" ] && SALT_GROUP=salt
+
+    # Reset permissions to fix previous installs
+    CUR_USER=$(ls -dl /run/salt-api.pid | cut -d ' ' -f 3)
+    CUR_GROUP=$(ls -dl /run/salt-api.pid | cut -d ' ' -f 4)
+    db_set salt-api/user $CUR_USER
+    chown -R $CUR_USER:$CUR_GROUP /var/log/salt/api
+    if command -v systemctl; then
+        SM_ENABLED=$(systemctl show -p UnitFileState salt-api | cut -d '=' -f 2)
+        db_set salt-api/enabled $SM_ENABLED
+        SM_ACTIVE=$(systemctl is-active salt-api)
+        db_set salt-api/active $SM_ACTIVE
+    else
+        db_set salt-api/enabled enabled
+        db_set salt-api/active active
+    fi
+    ;;
+esac
diff --git a/pkg/debian/salt-api.templates b/pkg/debian/salt-api.templates
new file mode 100644
index 00000000000..88e4b0823c7
--- /dev/null
+++ b/pkg/debian/salt-api.templates
@@ -0,0 +1,17 @@
+Template: salt-api/user
+Type: string
+Default: salt
+Description: User for salt-api
+  User to run the salt-api process as
+
+Template: salt-api/enabled
+Type: string
+Default: enabled
+Description: Systemd enable state for salt-api
+ Default enable state for the salt-api systemd unit
+
+Template: salt-api/active
+Type: string
+Default: active
+Description: Systemd active state for salt-api
+ Default active state for the salt-api systemd unit
diff --git a/pkg/debian/salt-cloud.postinst b/pkg/debian/salt-cloud.postinst
index a92551161da..a6c3c2119a9 100644
--- a/pkg/debian/salt-cloud.postinst
+++ b/pkg/debian/salt-cloud.postinst
@@ -1,6 +1,13 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
 case "$1" in
   configure)
-    PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush;")
-    chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy
+    db_get salt-master/user
+    if [ "$RET" != "root" ]; then
+      PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush()")
+      chown -R $RET:$RET /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy
+    fi
   ;;
 esac
diff --git a/pkg/common/salt.bash b/pkg/debian/salt-common.bash-completion
similarity index 63%
rename from pkg/common/salt.bash
rename to pkg/debian/salt-common.bash-completion
index 35fe0695dbe..aba866bc795 100644
--- a/pkg/common/salt.bash
+++ b/pkg/debian/salt-common.bash-completion
@@ -5,86 +5,47 @@
 # TODO: solve somehow completion for  salt -G pythonversion:[tab]
 #       (not sure what to do with lists)
 # TODO: --range[tab] --   how?
+# TODO: -E --exsel[tab] -- how?
 # TODO: --compound[tab] -- how?
 # TODO: use history to extract some words, esp. if ${cur} is empty
-# TODO: TEST EVERYTHING a lot
+# TODO: TEST EVERYTHING a lot
+# TODO: cache results of some functions?  where? how long?
 # TODO: is it ok to use '--timeout 2' ?
 
 
 _salt_get_grains(){
     if [ "$1" = 'local' ] ; then
-        salt-call --log-level=error --out=txt -- grains.ls | sed  's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
+        salt-call --out=txt -- grains.ls | sed  's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
     else
-      salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.ls | sed  's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
+      salt '*' --timeout 2 --out=txt -- grains.ls | sed  's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
     fi
 }
 
 _salt_get_grain_values(){
     if [ "$1" = 'local' ] ; then
-        salt-call --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
+        salt-call --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
     else
-        salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
+        salt '*' --timeout 2 --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
     fi
 }
 
-_salt_get_keys(){
-    for type in $*; do
-      # remove header from data:
-      salt-key --no-color -l $type | tail -n+2
-    done
-}
-
-_salt_list_functions(){
-    # salt-call: get all functions on this minion
-    # salt: get all functions on all minions
-    # sed: remove all array overhead and convert to newline separated list
-    # sort: chop out doubled entries, so overhead is minimal later during actual completion
-    if [ "$1" = 'local' ] ; then
-        salt-call --log-level=quiet --out=txt -- sys.list_functions \
-          | sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
-          | sort -u
-    else
-        salt '*' --timeout 2 --hide-timeout --log-level=quiet --out=txt -- sys.list_functions \
-          | sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
-          | sort -u
-    fi
-}
-
-_salt_get_coms() {
-    CACHE_DIR="$HOME/.cache/salt-${1}-comp-cache_functions"
-    local _salt_cache_functions=${SALT_COMP_CACHE_FUNCTIONS:=$CACHE_DIR}
-    local _salt_cache_timeout=${SALT_COMP_CACHE_TIMEOUT:='last hour'}
-
-    if [ ! -d "$(dirname ${_salt_cache_functions})" ]; then
-        mkdir -p "$(dirname ${_salt_cache_functions})"
-    fi
-
-    # Regenerate cache if timed out
-    if [[ "$(stat --format=%Z ${_salt_cache_functions} 2>/dev/null)" -lt "$(date --date="${_salt_cache_timeout}" +%s)" ]]; then
-	_salt_list_functions $1 > "${_salt_cache_functions}"
-    fi
-
-    # filter results, to only print the part to next dot (or end of function)
-    sed 's/^\('${cur}'\(\.\|[^.]*\)\)\?.*/\1/' "${_salt_cache_functions}" | sort -u
-}
 
 _salt(){
-
     local cur prev opts _salt_grains _salt_coms pprev ppprev
     COMPREPLY=()
     cur="${COMP_WORDS[COMP_CWORD]}"
     prev="${COMP_WORDS[COMP_CWORD-1]}"
     if [ ${COMP_CWORD} -gt 2 ]; then
-        pprev="${COMP_WORDS[COMP_CWORD-2]}"
+	pprev="${COMP_WORDS[COMP_CWORD-2]}"
     fi
     if [ ${COMP_CWORD} -gt 3 ]; then
-        ppprev="${COMP_WORDS[COMP_CWORD-3]}"
+	ppprev="${COMP_WORDS[COMP_CWORD-3]}"
     fi
 
     opts="-h --help -d --doc --documentation --version --versions-report -c \
           --config-dir= -v --verbose -t --timeout= -s --static -b --batch= \
           --batch-size= -E --pcre -L --list -G --grain --grain-pcre -N \
-          --nodegroup -R --range -C --compound -I --pillar \
+          --nodegroup -R --range -C --compound -X --exsel -I --pillar \
           --return= -a --auth= --eauth= --extended-auth= -T --make-token -S \
           --ipcidr --out=pprint --out=yaml --out=overstatestage --out=json \
           --out=raw --out=highstate --out=key --out=txt --no-color --out-indent= "
@@ -98,7 +59,7 @@ _salt(){
     case "${pprev}" in
     -G|--grain|--grain-pcre)
     if [ "${cur}" = ":" ]; then
-        COMPREPLY=($(compgen -W "`_salt_get_grain_values ${prev}`"))
+        COMPREPLY=($(compgen -W "`_salt_get_grain_values ${prev}`"  ))
         return 0
     fi
     ;;
@@ -126,17 +87,17 @@ _salt(){
         return 0
         ;;
      salt)
-        COMPREPLY=($(compgen -W "\'*\' ${opts} $(_salt_get_keys acc)" -- ${cur}))
+        COMPREPLY=($(compgen -W "\'*\' ${opts} `salt-key --no-color -l acc`" -- ${cur}))
         return 0
         ;;
      -E|--pcre)
-        COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
+        COMPREPLY=($(compgen -W "`salt-key --no-color -l acc`" -- ${cur}))
         return 0
         ;;
      -G|--grain|--grain-pcre)
         COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
         return 0
-        ;;
+	;;
      -C|--compound)
         COMPREPLY=() # TODO: finish this one? how?
         return 0
@@ -149,18 +110,17 @@ _salt(){
         COMPREPLY=($(compgen -W "1 2 3 4 5 6 7 8 9 10 15 20 30 40 50 60 70 80 90 100 120 150 200"))
         return 0
         ;;
+     -X|--exsel) # TODO: finish this one? how?
+        return 0
+        ;;
      -N|--nodegroup)
-        MASTER_CONFIG='/etc/salt/master'
+	    MASTER_CONFIG='/etc/salt/master'
         COMPREPLY=($(compgen -W "`awk -F ':'  'BEGIN {print_line = 0};  /^nodegroups/ {print_line = 1;getline } print_line && /^  */ {print $1} /^[^ ]/ {print_line = 0}' <${MASTER_CONFIG}`" -- ${cur}))
         return 0
      ;;
     esac
 
-    _salt_coms=$(_salt_get_coms remote)
-
-    # If there are still dots in the suggestion, do not append space
-    grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
-
+    _salt_coms="$(salt '*' --timeout 2 --out=txt -- sys.list_functions | sed 's/^.*\[//' | tr -d ",']" )"
     all="${opts} ${_salt_coms}"
     COMPREPLY=( $(compgen -W "${all}" -- ${cur}) )
 
@@ -202,15 +162,15 @@ _saltkey(){
 
     case "${prev}" in
      -a|--accept)
-        COMPREPLY=($(compgen -W "$(_salt_get_keys un rej)" -- ${cur}))
+        COMPREPLY=($(compgen -W "$(salt-key -l un --no-color; salt-key -l rej --no-color)" -- ${cur}))
         return 0
       ;;
      -r|--reject)
-        COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
+        COMPREPLY=($(compgen -W "$(salt-key -l acc --no-color)" -- ${cur}))
         return 0
         ;;
      -d|--delete)
-        COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
+        COMPREPLY=($(compgen -W "$(salt-key -l acc --no-color; salt-key -l un --no-color; salt-key -l rej --no-color)" -- ${cur}))
         return 0
         ;;
      -c|--config)
@@ -229,7 +189,7 @@ _saltkey(){
         return 0
         ;;
      -p|--print)
-        COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
+        COMPREPLY=($(compgen -W "$(salt-key -l acc --no-color; salt-key -l un --no-color; salt-key -l rej --no-color)" -- ${cur}))
         return 0
      ;;
      -l|--list)
@@ -237,7 +197,7 @@ _saltkey(){
         return 0
      ;;
      --accept-all)
-        return 0
+	return 0
      ;;
     esac
     COMPREPLY=($(compgen -W "${opts} " -- ${cur}))
@@ -276,26 +236,22 @@ _saltcall(){
     case ${prev} in
         -m|--module-dirs)
                 COMPREPLY=( $(compgen -d ${cur} ))
+		return 0
+ 	 	;;
+	-l|--log-level)
+		COMPREPLY=( $(compgen -W "info none garbage trace warning error debug" -- ${cur}))
+		return 0
+		;;
+	-g|grains)
                 return 0
-                ;;
-        -l|--log-level)
-                COMPREPLY=( $(compgen -W "info none garbage trace warning error debug" -- ${cur}))
-                return 0
-                ;;
-        -g|grains)
-                return 0
-                ;;
-        salt-call)
+		;;
+	salt-call)
                 COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
-                return 0
-                ;;
+	        return 0
+		;;
     esac
 
-    _salt_coms=$(_salt_get_coms local)
-
-    # If there are still dots in the suggestion, do not append space
-    grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
-
+    _salt_coms="$(salt-call --out=txt -- sys.list_functions|sed 's/^.*\[//' | tr -d ",']"  )"
     COMPREPLY=( $(compgen -W "${opts} ${_salt_coms}" -- ${cur} ))
     return 0
 }
@@ -311,7 +267,7 @@ _saltcp(){
     opts="-t --timeout= -s --static -b --batch= --batch-size= \
           -h --help --version --versions-report -c --config-dir= \
           -E --pcre -L --list -G --grain --grain-pcre -N --nodegroup \
-          -R --range -C --compound -I --pillar \
+          -R --range -C --compound -X --exsel -I --pillar \
           --out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
           --out=highstate --out=key --out=txt --no-color --out-indent= "
     if [[ "${cur}" == -* ]] ; then
@@ -327,45 +283,46 @@ _saltcp(){
     fi
 
     case ${prev} in
-        salt-cp)
-            COMPREPLY=($(compgen -W "${opts} $(_salt_get_keys acc)" -- ${cur}))
-            return 0
-            ;;
+ 	salt-cp)
+	    COMPREPLY=($(compgen -W "${opts} `salt-key -l acc --no-color`" -- ${cur}))
+	    return 0
+	;;
         -t|--timeout)
-            # those numbers are just a hint
+	    # those numbers are just a hint
             COMPREPLY=($(compgen -W "2 3 4 8 10 15 20 25 30 40 60 90 120 180 240 300" -- ${cur} ))
+	    return 0
+        ;;
+	-E|--pcre)
+            COMPREPLY=($(compgen -W "`salt-key -l acc --no-color`" -- ${cur}))
             return 0
-            ;;
-    -E|--pcre)
-            COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
-            return 0
-            ;;
-    -L|--list)
-            # IMPROVEMENTS ARE WELCOME
-            prefpart="${cur%,*},"
-            postpart=${cur##*,}
-            filt="^\($(echo ${cur}| sed 's:,:\\|:g')\)$"
-            helper=($(_salt_get_keys acc | grep -v "${filt}" | sed "s/^/${prefpart}/"))
-            COMPREPLY=($(compgen -W "${helper[*]}" -- ${cur}))
-            return 0
-            ;;
-    -G|--grain|--grain-pcre)
+	;;
+	-L|--list)
+	    # IMPROVEMENTS ARE WELCOME
+	    prefpart="${cur%,*},"
+	    postpart=${cur##*,}
+	    filt="^\($(echo ${cur}| sed 's:,:\\|:g')\)$"
+            helper=($(salt-key -l acc --no-color | grep -v "${filt}" | sed "s/^/${prefpart}/"))
+	    COMPREPLY=($(compgen -W "${helper[*]}" -- ${cur}))
+
+	    return 0
+	;;
+	-G|--grain|--grain-pcre)
             COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
             return 0
-            ;;
-    # FIXME
-    -R|--range)
-            # FIXME ??
-            return 0
-            ;;
-    -C|--compound)
-            # FIXME ??
-            return 0
-            ;;
-    -c|--config)
-            COMPREPLY=($(compgen -f -- ${cur}))
-            return 0
-            ;;
+	    ;;
+	    # FIXME
+	-R|--range)
+	    # FIXME ??
+	    return 0
+	;;
+	-C|--compound)
+	    # FIXME ??
+	    return 0
+	;;
+	-c|--config)
+	    COMPREPLY=($(compgen -f -- ${cur}))
+	    return 0
+	;;
     esac
 
    # default is using opts:
diff --git a/pkg/debian/salt-common.install b/pkg/debian/salt-common.install
index 4f8dac552ec..63f1d5a1287 100644
--- a/pkg/debian/salt-common.install
+++ b/pkg/debian/salt-common.install
@@ -1,9 +1,10 @@
+#! /usr/bin/dh-exec
+
 pkg/common/logrotate/salt-common /etc/logrotate.d
 pkg/common/fish-completions/salt-cp.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-call.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt_common.fish /usr/share/fish/vendor_completions.d
-pkg/common/salt.bash /usr/share/bash-completions/completions/salt-common.bash
 pkg/common/fish-completions/salt-minion.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-key.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-master.fish /usr/share/fish/vendor_completions.d
diff --git a/pkg/debian/salt-common.links b/pkg/debian/salt-common.links
index ef1cd42e5dd..cddd400ceeb 100644
--- a/pkg/debian/salt-common.links
+++ b/pkg/debian/salt-common.links
@@ -1,2 +1,9 @@
+# permissions on /var/log/salt to permit adm group ownership
+salt-common: non-standard-dir-perm
+
+# minor formatting error in table in man page
+salt-common: manpage-has-errors-from-man
+
 opt/saltstack/salt/salt-pip /usr/bin/salt-pip
 opt/saltstack/salt/salt-call /usr/bin/salt-call
+usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt-call
diff --git a/pkg/debian/salt-master.links b/pkg/debian/salt-master.links
index e6c0ef2446a..77c8bdc67b2 100644
--- a/pkg/debian/salt-master.links
+++ b/pkg/debian/salt-master.links
@@ -4,3 +4,6 @@ opt/saltstack/salt/salt-cp /usr/bin/salt-cp
 opt/saltstack/salt/salt-key /usr/bin/salt-key
 opt/saltstack/salt/salt-run /usr/bin/salt-run
 opt/saltstack/salt/spm /usr/bin/spm
+usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt
+usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt-cp
+usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt-key
diff --git a/pkg/debian/salt-master.postinst b/pkg/debian/salt-master.postinst
index 4f7686d8ed9..be7064f9bad 100644
--- a/pkg/debian/salt-master.postinst
+++ b/pkg/debian/salt-master.postinst
@@ -1,14 +1,41 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
 case "$1" in
   configure)
-    if [ ! -e "/var/log/salt/master" ]; then
-      touch /var/log/salt/master
-      chmod 640 /var/log/salt/master
+    db_get salt-master/user
+    if [ "$RET" != "root" ]; then
+      if [ ! -e "/var/log/salt/master" ]; then
+        touch /var/log/salt/master
+        chmod 640 /var/log/salt/master
+      fi
+      if [ ! -e "/var/log/salt/key" ]; then
+        touch /var/log/salt/key
+        chmod 640 /var/log/salt/key
+      fi
+      chown -R $RET:$RET /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master
     fi
-    if [ ! -e "/var/log/salt/key" ]; then
-      touch /var/log/salt/key
-      chmod 640 /var/log/salt/key
+    if command -v systemctl; then
+        db_get salt-master/active
+        RESLT=$(echo "$RET" | cut -d ' ' -f 1)
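+        # debconf replies starting with "10" when it has no stored answer for
+        # the question; in that case fall through to the default enable path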
+        if [ "$RESLT" != 10 ]; then
+            systemctl daemon-reload
+            if [ "$RESLT" = "active" ]; then
+                systemctl restart salt-master
+            fi
+            db_get salt-master/enabled
+            RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+            if [ "$RESLT" = "disabled" ]; then
+                systemctl disable salt-master
+            else
+                systemctl enable salt-master
+            fi
+        else
+            systemctl daemon-reload
+            systemctl restart salt-master
+            systemctl enable salt-master
+        fi
     fi
-    chown -R salt:salt /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master
-    if command -v systemctl; then systemctl enable salt-master; fi
   ;;
 esac
diff --git a/pkg/debian/salt-master.preinst b/pkg/debian/salt-master.preinst
index f205423079c..a96f9dd6767 100644
--- a/pkg/debian/salt-master.preinst
+++ b/pkg/debian/salt-master.preinst
@@ -1,5 +1,9 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
 case "$1" in
-  install|upgrade)
+  install)
     [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
     [ -z "$SALT_USER" ] && SALT_USER=salt
     [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
@@ -8,11 +12,36 @@ case "$1" in
 
     # Reset permissions to fix previous installs
     find ${SALT_HOME} /etc/salt /var/log/salt /var/cache/salt /var/run/salt \
-        \! \( -path /etc/salt/cloud.deploy.d\* -o -path /var/log/salt/cloud -o -path /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy\* \) -a \
-        \( -user ${SALT_USER} -o -group ${SALT_GROUP} \) -exec chown root:root \{\} \;
+        \! \( -path /etc/salt/cloud.deploy.d\* -o -path /var/log/salt/cloud -o -path \
+        /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy\* \) -a \( -user ${SALT_USER} \
+        -o -group ${SALT_GROUP} \) -exec chown ${SALT_USER}:${SALT_GROUP} \{\} \;
 
-  # remove incorrectly installed ufw salt-master directory - issue 57712
-  test -d /etc/ufw/applications.d/salt-master && rm -rf /etc/ufw/applications.d/salt-master || /bin/true
+    ;;
 
-  ;;
+  upgrade)
+    [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
+    [ -z "$SALT_USER" ] && SALT_USER=salt
+    [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
+    [ -z "$SALT_GROUP" ] && SALT_GROUP=salt
+    PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush();")
+
+    # Reset permissions to fix previous installs
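+    # Record the user/group currently owning the run dir via debconf so postinst can restore them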
+    CUR_USER=$(ls -dl /run/salt/master | cut -d ' ' -f 3)
+    CUR_GROUP=$(ls -dl /run/salt/master | cut -d ' ' -f 4)
+    db_set salt-master/user $CUR_USER
+    chown -R $CUR_USER:$CUR_GROUP /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master \
+        /var/log/salt/key /var/cache/salt/master /var/run/salt/master
+    if command -v systemctl; then
+        SM_ENABLED=$(systemctl show -p UnitFileState salt-master | cut -d '=' -f 2)
+        db_set salt-master/enabled $SM_ENABLED
+        SM_ACTIVE=$(systemctl is-active salt-master)
+        db_set salt-master/active $SM_ACTIVE
+    else
+        db_set salt-master/enabled enabled
+        db_set salt-master/active active
+    fi
+    ;;
 esac
+
+# remove incorrectly installed ufw salt-master directory - issue 57712
+test -d /etc/ufw/applications.d/salt-master && rm -rf /etc/ufw/applications.d/salt-master || /bin/true
diff --git a/pkg/debian/salt-master.templates b/pkg/debian/salt-master.templates
new file mode 100644
index 00000000000..c0ea8cfd69b
--- /dev/null
+++ b/pkg/debian/salt-master.templates
@@ -0,0 +1,17 @@
+Template: salt-master/user
+Type: string
+Default: salt
+Description: User for salt-master
+  User to run the salt-master process as
+
+Template: salt-master/enabled
+Type: string
+Default: enabled
+Description: Systemd enable state for salt-master
+ default enable state for salt-master systemd state
+
+Template: salt-master/active
+Type: string
+Default: active
+Description: Systemd active state for salt-master
+ default active state for salt-master systemd state
diff --git a/pkg/debian/salt-minion.postinst b/pkg/debian/salt-minion.postinst
new file mode 100644
index 00000000000..13d1cf50901
--- /dev/null
+++ b/pkg/debian/salt-minion.postinst
@@ -0,0 +1,41 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
+case "$1" in
+  configure)
+    db_get salt-minion/user
+    if [ "$RET" != "root" ]; then
+      if [ ! -e "/var/log/salt/minion" ]; then
+        touch /var/log/salt/minion
+        chmod 640 /var/log/salt/minion
+      fi
+      if [ ! -e "/var/log/salt/key" ]; then
+        touch /var/log/salt/key
+        chmod 640 /var/log/salt/key
+      fi
+      chown -R $RET:$RET /etc/salt/pki/minion /etc/salt/minion.d /var/log/salt/minion /var/cache/salt/minion /var/run/salt/minion
+    fi
+    if command -v systemctl; then
+        db_get salt-minion/active
+        RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+        if [ "$RESLT" != 10 ]; then
+            systemctl daemon-reload
+            if [ "$RESLT" = "active" ]; then
+                systemctl restart salt-minion
+            fi
+            db_get salt-minion/enabled
+            RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+            if [ "$RESLT" = "disabled" ]; then
+                systemctl disable salt-minion
+            else
+                systemctl enable salt-minion
+            fi
+        else
+            systemctl daemon-reload
+            systemctl restart salt-minion
+            systemctl enable salt-minion
+        fi
+    fi
+  ;;
+esac
diff --git a/pkg/debian/salt-minion.preinst b/pkg/debian/salt-minion.preinst
new file mode 100644
index 00000000000..51be48e0677
--- /dev/null
+++ b/pkg/debian/salt-minion.preinst
@@ -0,0 +1,29 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
+case "$1" in
+  upgrade)
+    [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
+    [ -z "$SALT_USER" ] && SALT_USER=salt
+    [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
+    [ -z "$SALT_GROUP" ] && SALT_GROUP=salt
+    PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush();")
+
+    # Reset permissions to fix previous installs
+    CUR_USER=$(ls -dl /run/salt/minion | cut -d ' ' -f 3)
+    CUR_GROUP=$(ls -dl /run/salt/minion | cut -d ' ' -f 4)
+    db_set salt-minion/user $CUR_USER
+    chown -R $CUR_USER:$CUR_GROUP /etc/salt/pki/minion /etc/salt/minion.d /var/log/salt/minion \
+        /var/cache/salt/minion /var/run/salt/minion
+    if command -v systemctl; then
+        SM_ENABLED=$(systemctl show -p UnitFileState salt-minion | cut -d '=' -f 2)
+        db_set salt-minion/enabled $SM_ENABLED
+        SM_ACTIVE=$(systemctl is-active salt-minion)
+        db_set salt-minion/active $SM_ACTIVE
+    else
+        db_set salt-minion/enabled enabled
+        db_set salt-minion/active active
+    fi
+    ;;
+esac
diff --git a/pkg/debian/salt-minion.templates b/pkg/debian/salt-minion.templates
new file mode 100644
index 00000000000..583e027d5d7
--- /dev/null
+++ b/pkg/debian/salt-minion.templates
@@ -0,0 +1,17 @@
+Template: salt-minion/user
+Type: string
+Default: root
+Description: User for salt-minion
+  User to run the salt-minion process as
+
+Template: salt-minion/enabled
+Type: string
+Default: enabled
+Description: Systemd enable state for salt-minion
+ default enable state for salt-minion systemd state
+
+Template: salt-minion/active
+Type: string
+Default: active
+Description: Systemd active state for salt-minion
+ default active state for salt-minion systemd state
diff --git a/pkg/debian/salt-syndic.postinst b/pkg/debian/salt-syndic.postinst
new file mode 100644
index 00000000000..071ba38e185
--- /dev/null
+++ b/pkg/debian/salt-syndic.postinst
@@ -0,0 +1,37 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
+case "$1" in
+  configure)
+    db_get salt-syndic/user
+    if [ "$RET" != "root" ]; then
+      if [ ! -e "/var/log/salt/syndic" ]; then
+        touch /var/log/salt/syndic
+        chmod 640 /var/log/salt/syndic
+      fi
+      chown $RET:$RET /var/log/salt/syndic
+    fi
+    if command -v systemctl; then
+        db_get salt-syndic/active
+        RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+        if [ "$RESLT" != 10 ]; then
+            systemctl daemon-reload
+            if [ "$RESLT" = "active" ]; then
+                systemctl restart salt-syndic
+            fi
+            db_get salt-syndic/enabled
+            RESLT=$(echo "$RET" | cut -d ' ' -f 1)
+            if [ "$RESLT" = "disabled" ]; then
+                systemctl disable salt-syndic
+            else
+                systemctl enable salt-syndic
+            fi
+        else
+            systemctl daemon-reload
+            systemctl restart salt-syndic
+            systemctl enable salt-syndic
+        fi
+    fi
+  ;;
+esac
diff --git a/pkg/debian/salt-syndic.preinst b/pkg/debian/salt-syndic.preinst
new file mode 100644
index 00000000000..da43d779163
--- /dev/null
+++ b/pkg/debian/salt-syndic.preinst
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+. /usr/share/debconf/confmodule
+
+case "$1" in
+  upgrade)
+    [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
+    [ -z "$SALT_USER" ] && SALT_USER=salt
+    [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
+    [ -z "$SALT_GROUP" ] && SALT_GROUP=salt
+
+    # Reset permissions to fix previous installs
+    CUR_USER=$(ls -dl /run/salt-syndic.pid | cut -d ' ' -f 3)
+    CUR_GROUP=$(ls -dl /run/salt-syndic.pid | cut -d ' ' -f 4)
+    db_set salt-syndic/user $CUR_USER
+    chown -R $CUR_USER:$CUR_GROUP /var/log/salt/syndic
+    if command -v systemctl; then
+        SM_ENABLED=$(systemctl show -p UnitFileState salt-syndic | cut -d '=' -f 2)
+        db_set salt-syndic/enabled $SM_ENABLED
+        SM_ACTIVE=$(systemctl is-active salt-syndic)
+        db_set salt-syndic/active $SM_ACTIVE
+    else
+        db_set salt-syndic/enabled enabled
+        db_set salt-syndic/active active
+    fi
+    ;;
+esac
diff --git a/pkg/debian/salt-syndic.templates b/pkg/debian/salt-syndic.templates
new file mode 100644
index 00000000000..c27859e0a24
--- /dev/null
+++ b/pkg/debian/salt-syndic.templates
@@ -0,0 +1,17 @@
+Template: salt-syndic/user
+Type: string
+Default: salt
+Description: User for salt-syndic
+  User to run the salt-syndic process as
+
+Template: salt-syndic/enabled
+Type: string
+Default: enabled
+Description: Systemd enable state for salt-syndic
+ default enable state for salt-syndic systemd state
+
+Template: salt-syndic/active
+Type: string
+Default: active
+Description: Systemd active state for salt-syndic
+ default active state for salt-syndic systemd state
diff --git a/pkg/macos/pkg-scripts/postinstall b/pkg/macos/pkg-scripts/postinstall
index 3cc98edd1f6..85f44b5f0cd 100755
--- a/pkg/macos/pkg-scripts/postinstall
+++ b/pkg/macos/pkg-scripts/postinstall
@@ -86,55 +86,107 @@ fi
 
 log "Symlink: Creating symlinks for salt..."
 ln -sf "$INSTALL_DIR/salt" "$SBIN_DIR/salt"
-log "Symlink: Created Successfully"
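+# [ -f ] follows the symlink, so this checks that the link resolves to an existing file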
+if [ -f "$SBIN_DIR/salt" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-api..."
 ln -sf "$INSTALL_DIR/salt-api" "$SBIN_DIR/salt-api"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-api" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-call..."
 ln -sf "$INSTALL_DIR/salt-call" "$SBIN_DIR/salt-call"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-call" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-cloud..."
 ln -sf "$INSTALL_DIR/salt-cloud" "$SBIN_DIR/salt-cloud"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-cloud" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-cp..."
 ln -sf "$INSTALL_DIR/salt-cp" "$SBIN_DIR/salt-cp"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-cp" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-key..."
 ln -sf "$INSTALL_DIR/salt-key" "$SBIN_DIR/salt-key"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-key" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-master..."
 ln -sf "$INSTALL_DIR/salt-master" "$SBIN_DIR/salt-master"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-master" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-minion..."
 ln -sf "$INSTALL_DIR/salt-minion" "$SBIN_DIR/salt-minion"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-minion" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-proxy..."
 ln -sf "$INSTALL_DIR/salt-proxy" "$SBIN_DIR/salt-proxy"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-proxy" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-run..."
 ln -sf "$INSTALL_DIR/salt-run" "$SBIN_DIR/salt-run"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-run" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for spm..."
 ln -sf "$INSTALL_DIR/spm" "$SBIN_DIR/spm"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/spm" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-ssh..."
 ln -sf "$INSTALL_DIR/salt-ssh" "$SBIN_DIR/salt-ssh"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-ssh" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 log "Symlink: Creating symlinks for salt-syndic..."
 ln -sf "$INSTALL_DIR/salt-syndic" "$SBIN_DIR/salt-syndic"
-log "Symlink: Created Successfully"
+if [ -f "$SBIN_DIR/salt-syndic" ]; then
+    log "Symlink: Created Successfully"
+else
+    log "Symlink: Failed to create symlink"
+fi
 
 #-------------------------------------------------------------------------------
 # Add salt to paths.d
diff --git a/pkg/old/suse/salt-api.spec b/pkg/old/suse/salt-api.spec
index f4675fc2310..361448e9536 100644
--- a/pkg/old/suse/salt-api.spec
+++ b/pkg/old/suse/salt-api.spec
@@ -20,7 +20,7 @@ Version:        0.8.3
 Release:        0
 License:        Apache-2.0
 Summary:        The api for Salt a parallel remote execution system
-Url:            http://saltstack.org/
+Url:            http://saltproject.io/
 Group:          System/Monitoring
 Source0:        http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
 Source1:		salt-api
diff --git a/pkg/old/suse/salt.spec b/pkg/old/suse/salt.spec
index ecc3442b907..bc7f85a8993 100644
--- a/pkg/old/suse/salt.spec
+++ b/pkg/old/suse/salt.spec
@@ -41,7 +41,7 @@ Release:        0
 Summary:        A parallel remote execution system
 License:        Apache-2.0
 Group:          System/Monitoring
-Url:            http://saltstack.org/
+Url:            http://saltproject.io/
 Source0:        http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
 Source1:        README.SUSE
 Source2:        salt-tmpfiles.d
diff --git a/pkg/rpm/salt.bash b/pkg/rpm/salt.bash
deleted file mode 120000
index 98ee56c40cd..00000000000
--- a/pkg/rpm/salt.bash
+++ /dev/null
@@ -1 +0,0 @@
-../common/salt.bash
\ No newline at end of file
diff --git a/pkg/rpm/salt.bash b/pkg/rpm/salt.bash
new file mode 100644
index 00000000000..6363fe14271
--- /dev/null
+++ b/pkg/rpm/salt.bash
@@ -0,0 +1,372 @@
+# TODO: check if --config|-c was used and use configured config file for queries
+# TODO: solve somehow completion for  salt -G pythonversion:[tab]
+#       (not sure what to do with lists)
+# TODO: --range[tab] --   how?
+# TODO: --compound[tab] -- how?
+# TODO: use history to extract some words, esp. if ${cur} is empty
+# TODO: TEST EVERYTHING a lot
+# TODO: is it ok to use '--timeout 2' ?
+
+
+_salt_get_grains(){
+    if [ "$1" = 'local' ] ; then
+        salt-call --log-level=error --out=txt -- grains.ls | sed  's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
+    else
+      salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.ls | sed  's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
+    fi
+}
+
+_salt_get_grain_values(){
+    if [ "$1" = 'local' ] ; then
+        salt-call --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
+    else
+        salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
+    fi
+}
+
+_salt_get_keys(){
+    for type in $*; do
+      # remove header from data:
+      salt-key --no-color -l $type | tail -n+2
+    done
+}
+
+_salt_list_functions(){
+    # salt-call: get all functions on this minion
+    # salt: get all functions on all minions
+    # sed: remove all array overhead and convert to newline separated list
+    # sort: chop out doubled entries, so overhead is minimal later during actual completion
+    if [ "$1" = 'local' ] ; then
+        salt-call --log-level=quiet --out=txt -- sys.list_functions \
+          | sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
+          | sort -u
+    else
+        salt '*' --timeout 2 --hide-timeout --log-level=quiet --out=txt -- sys.list_functions \
+          | sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
+          | sort -u
+    fi
+}
+
+_salt_get_coms() {
+    CACHE_DIR="$HOME/.cache/salt-${1}-comp-cache_functions"
+    local _salt_cache_functions=${SALT_COMP_CACHE_FUNCTIONS:=$CACHE_DIR}
+    local _salt_cache_timeout=${SALT_COMP_CACHE_TIMEOUT:='last hour'}
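+    # SALT_COMP_CACHE_FUNCTIONS overrides the cache file path;
+    # SALT_COMP_CACHE_TIMEOUT accepts any 'date --date' expression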
+
+    if [ ! -d "$(dirname ${_salt_cache_functions})" ]; then
+        mkdir -p "$(dirname ${_salt_cache_functions})"
+    fi
+
+    # Regenerate cache if timed out
+    if [[ "$(stat --format=%Z ${_salt_cache_functions} 2>/dev/null)" -lt "$(date --date="${_salt_cache_timeout}" +%s)" ]]; then
+        _salt_list_functions $1 > "${_salt_cache_functions}"
+    fi
+
+    # filter results, to only print the part to next dot (or end of function)
+    sed 's/^\('${cur}'\(\.\|[^.]*\)\)\?.*/\1/' "${_salt_cache_functions}" | sort -u
+}
+
+_salt(){
+
+    local cur prev opts _salt_grains _salt_coms pprev ppprev
+    COMPREPLY=()
+    cur="${COMP_WORDS[COMP_CWORD]}"
+    prev="${COMP_WORDS[COMP_CWORD-1]}"
+    if [ ${COMP_CWORD} -gt 2 ]; then
+        pprev="${COMP_WORDS[COMP_CWORD-2]}"
+    fi
+    if [ ${COMP_CWORD} -gt 3 ]; then
+        ppprev="${COMP_WORDS[COMP_CWORD-3]}"
+    fi
+
+    opts="-h --help -d --doc --documentation --version --versions-report -c \
+          --config-dir= -v --verbose -t --timeout= -s --static -b --batch= \
+          --batch-size= -E --pcre -L --list -G --grain --grain-pcre -N \
+          --nodegroup -R --range -C --compound -I --pillar \
+          --return= -a --auth= --eauth= --extended-auth= -T --make-token -S \
+          --ipcidr --out=pprint --out=yaml --out=overstatestage --out=json \
+          --out=raw --out=highstate --out=key --out=txt --no-color --out-indent= "
+
+    if [[ "${cur}" == -* ]] ; then
+        COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+        return 0
+    fi
+
+    # 2 special cases for filling up grain values
+    case "${pprev}" in
+    -G|--grain|--grain-pcre)
+    if [ "${cur}" = ":" ]; then
+        COMPREPLY=($(compgen -W "`_salt_get_grain_values ${prev}`"))
+        return 0
+    fi
+    ;;
+    esac
+    case "${ppprev}" in
+    -G|--grain|--grain-pcre)
+        if [ "${prev}" = ":" ]; then
+        COMPREPLY=( $(compgen -W "`_salt_get_grain_values ${pprev}`" -- ${cur}) )
+        return 0
+        fi
+    ;;
+    esac
+
+    if [ "${cur}" = "=" ] && [[ "${prev}" == --* ]]; then
+       cur=""
+    fi
+    if [ "${prev}" = "=" ] && [[ "${pprev}" == --* ]]; then
+       prev="${pprev}"
+    fi
+
+   case "${prev}" in
+
+     -c|--config)
+        COMPREPLY=($(compgen -f -- ${cur}))
+        return 0
+        ;;
+     salt)
+        COMPREPLY=($(compgen -W "\'*\' ${opts} $(_salt_get_keys acc)" -- ${cur}))
+        return 0
+        ;;
+     -E|--pcre)
+        COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
+        return 0
+        ;;
+     -G|--grain|--grain-pcre)
+        COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
+        return 0
+        ;;
+     -C|--compound)
+        COMPREPLY=() # TODO: finish this one? how?
+        return 0
+        ;;
+     -t|--timeout)
+        COMPREPLY=($( compgen -W "1 2 3 4 5 6 7 8 9 10 15 20 30 40 60 90 120 180" -- ${cur}))
+        return 0
+        ;;
+     -b|--batch|--batch-size)
+        COMPREPLY=($(compgen -W "1 2 3 4 5 6 7 8 9 10 15 20 30 40 50 60 70 80 90 100 120 150 200"))
+        return 0
+        ;;
+     -N|--nodegroup)
+        MASTER_CONFIG='/etc/salt/master'
+        COMPREPLY=($(compgen -W "`awk -F ':'  'BEGIN {print_line = 0};  /^nodegroups/ {print_line = 1;getline } print_line && /^  */ {print $1} /^[^ ]/ {print_line = 0}' <${MASTER_CONFIG}`" -- ${cur}))
+        return 0
+     ;;
+    esac
+
+    _salt_coms=$(_salt_get_coms remote)
+
+    # If there are still dots in the suggestion, do not append space
+    grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
+
+    all="${opts} ${_salt_coms}"
+    COMPREPLY=( $(compgen -W "${all}" -- ${cur}) )
+
+  return 0
+}
+
+complete -F _salt salt
+
+
+_saltkey(){
+    local cur prev opts pprev ppprev
+    COMPREPLY=()
+    cur="${COMP_WORDS[COMP_CWORD]}"
+    prev="${COMP_WORDS[COMP_CWORD-1]}"
+    opts="-c --config-dir= -h --help --version --versions-report -q --quiet \
+          -y --yes --gen-keys= --gen-keys-dir= --keysize= --key-logfile= \
+          -l --list= -L --list-all -a --accept= -A --accept-all \
+          -r --reject= -R --reject-all -p --print= -P --print-all \
+          -d --delete= -D --delete-all -f --finger= -F --finger-all \
+          --out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
+          --out=highstate --out=key --out=txt --no-color --out-indent= "
+    if [ ${COMP_CWORD} -gt 2 ]; then
+        pprev="${COMP_WORDS[COMP_CWORD-2]}"
+    fi
+    if [ ${COMP_CWORD} -gt 3 ]; then
+        ppprev="${COMP_WORDS[COMP_CWORD-3]}"
+    fi
+    if [[ "${cur}" == -* ]] ; then
+        COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+        return 0
+    fi
+
+    if [ "${cur}" = "=" ] && [[ "${prev}" == --* ]]; then
+       cur=""
+    fi
+    if [ "${prev}" = "=" ] && [[ "${pprev}" == --* ]]; then
+       prev="${pprev}"
+    fi
+
+    case "${prev}" in
+     -a|--accept)
+        COMPREPLY=($(compgen -W "$(_salt_get_keys un rej)" -- ${cur}))
+        return 0
+      ;;
+     -r|--reject)
+        COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
+        return 0
+        ;;
+     -d|--delete)
+        COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
+        return 0
+        ;;
+     -c|--config)
+        COMPREPLY=($(compgen -f -- ${cur}))
+        return 0
+        ;;
+     --keysize)
+        COMPREPLY=($(compgen -W "2048 3072 4096 5120 6144" -- ${cur}))
+        return 0
+        ;;
+     --gen-keys)
+        return 0
+        ;;
+     --gen-keys-dir)
+        COMPREPLY=($(compgen -d -- ${cur}))
+        return 0
+        ;;
+     -p|--print)
+        COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
+        return 0
+     ;;
+     -l|--list)
+        COMPREPLY=($(compgen -W "pre un acc accepted unaccepted rej rejected all" -- ${cur}))
+        return 0
+     ;;
+     --accept-all)
+        return 0
+     ;;
+    esac
+    COMPREPLY=($(compgen -W "${opts} " -- ${cur}))
+    return 0
+}
+
+complete -F _saltkey salt-key
+
+_saltcall(){
+    local cur prev opts _salt_coms pprev ppprev
+    COMPREPLY=()
+    cur="${COMP_WORDS[COMP_CWORD]}"
+    prev="${COMP_WORDS[COMP_CWORD-1]}"
+    opts="-h --help -d --doc --documentation --version --versions-report \
+          -m --module-dirs= -g --grains --return= --local -c --config-dir= -l --log-level= \
+          --out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
+          --out=highstate --out=key --out=txt --no-color --out-indent= "
+    if [ ${COMP_CWORD} -gt 2 ]; then
+        pprev="${COMP_WORDS[COMP_CWORD-2]}"
+    fi
+    if [ ${COMP_CWORD} -gt 3 ]; then
+        ppprev="${COMP_WORDS[COMP_CWORD-3]}"
+    fi
+    if [[ "${cur}" == -* ]] ; then
+        COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+        return 0
+    fi
+
+    if [ "${cur}" = "=" ] && [[ ${prev} == --* ]]; then
+       cur=""
+    fi
+    if [ "${prev}" = "=" ] && [[ ${pprev} == --* ]]; then
+       prev="${pprev}"
+    fi
+
+    case ${prev} in
+        -m|--module-dirs)
+                COMPREPLY=( $(compgen -d ${cur} ))
+                return 0
+                ;;
+        -l|--log-level)
+                COMPREPLY=( $(compgen -W "info none garbage trace warning error debug" -- ${cur}))
+                return 0
+                ;;
+        -g|--grains)
+                return 0
+                ;;
+        salt-call)
+                COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+                return 0
+                ;;
+    esac
+
+    _salt_coms=$(_salt_get_coms local)
+
+    # If there are still dots in the suggestion, do not append space
+    grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
+
+    COMPREPLY=( $(compgen -W "${opts} ${_salt_coms}" -- ${cur} ))
+    return 0
+}
+
+complete -F _saltcall salt-call
+
+
+_saltcp(){
+    local cur prev opts target prefpart postpart helper filt pprev ppprev
+    COMPREPLY=()
+    cur="${COMP_WORDS[COMP_CWORD]}"
+    prev="${COMP_WORDS[COMP_CWORD-1]}"
+    opts="-t --timeout= -s --static -b --batch= --batch-size= \
+          -h --help --version --versions-report -c --config-dir= \
+          -E --pcre -L --list -G --grain --grain-pcre -N --nodegroup \
+          -R --range -C --compound -I --pillar \
+          --out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
+          --out=highstate --out=key --out=txt --no-color --out-indent= "
+    if [[ "${cur}" == -* ]] ; then
+        COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+        return 0
+    fi
+
+    if [ "${cur}" = "=" ] && [[ "${prev}" == --* ]]; then
+       cur=""
+    fi
+    if [ "${prev}" = "=" ] && [[ "${pprev}" == --* ]]; then
+       prev=${pprev}
+    fi
+
+    case ${prev} in
+        salt-cp)
+            COMPREPLY=($(compgen -W "${opts} $(_salt_get_keys acc)" -- ${cur}))
+            return 0
+            ;;
+        -t|--timeout)
+            # those numbers are just a hint
+            COMPREPLY=($(compgen -W "2 3 4 8 10 15 20 25 30 40 60 90 120 180 240 300" -- ${cur} ))
+            return 0
+            ;;
+    -E|--pcre)
+            COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
+            return 0
+            ;;
+    -L|--list)
+            # IMPROVEMENTS ARE WELCOME
+            prefpart="${cur%,*},"
+            postpart=${cur##*,}
+            filt="^\($(echo ${cur}| sed 's:,:\\|:g')\)$"
+            helper=($(_salt_get_keys acc | grep -v "${filt}" | sed "s/^/${prefpart}/"))
+            COMPREPLY=($(compgen -W "${helper[*]}" -- ${cur}))
+            return 0
+            ;;
+    -G|--grain|--grain-pcre)
+            COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
+            return 0
+            ;;
+    # FIXME
+    -R|--range)
+            # FIXME ??
+            return 0
+            ;;
+    -C|--compound)
+            # FIXME ??
+            return 0
+            ;;
+    -c|--config)
+            COMPREPLY=($(compgen -f -- ${cur}))
+            return 0
+            ;;
+    esac
+
+   # default is using opts:
+   COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
+}
+
+complete -F _saltcp salt-cp
diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec
index 637b3f65079..fbac2262a50 100644
--- a/pkg/rpm/salt.spec
+++ b/pkg/rpm/salt.spec
@@ -15,10 +15,18 @@
 %global __requires_exclude_from ^.*$
 %define _source_payload w2.gzdio
 %define _binary_payload w2.gzdio
-%define _SALT_GROUP salt
-%define _SALT_USER salt
-%define _SALT_NAME Salt
-%define _SALT_HOME /opt/saltstack/salt
+%global _SALT_GROUP salt
+%global _SALT_USER salt
+%global _SALT_NAME Salt
+%global _SALT_HOME /opt/saltstack/salt
+
+# salt-master current user and group
+%global _MS_CUR_USER %{_SALT_USER}
+%global _MS_CUR_GROUP %{_SALT_GROUP}
+
+# salt-minion current user and group
+%global _MN_CUR_USER %{_SALT_USER}
+%global _MN_CUR_GROUP %{_SALT_GROUP}
 
 # Disable debugsource template
 %define _debugsource_template %{nil}
@@ -275,7 +283,7 @@ install -p -m 0644 %{_salt_src}/pkg/common/logrotate/salt-common %{buildroot}%{_
 
 # Bash completion
 mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/
-install -p -m 0644 %{_salt_src}/pkg/common/salt.bash %{buildroot}%{_sysconfdir}/bash_completion.d/salt.bash
+install -p -m 0644 %{_salt_src}/pkg/rpm/salt.bash %{buildroot}%{_sysconfdir}/bash_completion.d/salt.bash
 
 # Fish completion (TBD remove -v)
 mkdir -p %{buildroot}%{fish_dir}
@@ -425,11 +433,31 @@ usermod -c "%{_SALT_NAME}" \
          %{_SALT_USER}
 
 %pre master
-# Reset permissions to fix previous installs
-PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush();")
-find /etc/salt /opt/saltstack/salt /var/log/salt /var/cache/salt /var/run/salt \
-  \! \( -path /etc/salt/cloud.deploy.d\* -o -path /var/log/salt/cloud -o -path /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy\* \) -a \
-  \( -user salt -o -group salt \) -exec chown root:root \{\} \;
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    _MS_LCUR_USER=$(ls -dl /run/salt/master | cut -d ' ' -f 3)
+    _MS_LCUR_GROUP=$(ls -dl /run/salt/master | cut -d ' ' -f 4)
+    %global _MS_CUR_USER  %{_MS_LCUR_USER}
+    %global _MS_CUR_GROUP %{_MS_LCUR_GROUP}
+fi
+
+%pre syndic
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    _MS_LCUR_USER=$(ls -dl /run/salt/master | cut -d ' ' -f 3)
+    _MS_LCUR_GROUP=$(ls -dl /run/salt/master | cut -d ' ' -f 4)
+    %global _MS_CUR_USER  %{_MS_LCUR_USER}
+    %global _MS_CUR_GROUP %{_MS_LCUR_GROUP}
+fi
+
+%pre minion
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    _MN_LCUR_USER=$(ls -dl /run/salt/minion | cut -d ' ' -f 3)
+    _MN_LCUR_GROUP=$(ls -dl /run/salt/minion | cut -d ' ' -f 4)
+    %global _MN_CUR_USER  %{_MN_LCUR_USER}
+    %global _MN_CUR_GROUP %{_MN_LCUR_GROUP}
+fi
 
 
 # assumes systemd for RHEL 7 & 8 & 9
@@ -444,6 +472,14 @@ if [ $1 -eq 0 ] ; then
   /bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || :
 fi
 
+%preun syndic
+# %%systemd_preun salt-syndic.service
+if [ $1 -eq 0 ] ; then
+  # Package removal, not upgrade
+  /bin/systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || :
+  /bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || :
+fi
+
 %preun minion
 # %%systemd_preun salt-minion.service
 if [ $1 -eq 0 ] ; then
@@ -452,7 +488,6 @@ if [ $1 -eq 0 ] ; then
   /bin/systemctl stop salt-minion.service > /dev/null 2>&1 || :
 fi
 
-
 %preun api
 # %%systemd_preun salt-api.service
 if [ $1 -eq 0 ] ; then
@@ -558,7 +593,12 @@ if [ ! -e "/var/log/salt/cloud" ]; then
   touch /var/log/salt/cloud
   chmod 640 /var/log/salt/cloud
 fi
-chown -R %{_SALT_USER}:%{_SALT_GROUP} /etc/salt/cloud.deploy.d /var/log/salt/cloud /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    chown -R %{_MS_CUR_USER}:%{_MS_CUR_GROUP} /etc/salt/cloud.deploy.d /var/log/salt/cloud /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy
+else
+    chown -R %{_SALT_USER}:%{_SALT_GROUP} /etc/salt/cloud.deploy.d /var/log/salt/cloud /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy
+fi
 
 
 %posttrans master
@@ -570,7 +610,25 @@ if [ ! -e "/var/log/salt/key" ]; then
   touch /var/log/salt/key
   chmod 640 /var/log/salt/key
 fi
-chown -R %{_SALT_USER}:%{_SALT_GROUP} /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    chown -R %{_MS_CUR_USER}:%{_MS_CUR_GROUP} /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master
+else
+    chown -R %{_SALT_USER}:%{_SALT_GROUP} /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master
+fi
+
+
+%posttrans syndic
+if [ ! -e "/var/log/salt/syndic" ]; then
+  touch /var/log/salt/syndic
+  chmod 640 /var/log/salt/syndic
+fi
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    chown -R %{_MS_CUR_USER}:%{_MS_CUR_GROUP} /var/log/salt/syndic
+else
+    chown -R %{_SALT_USER}:%{_SALT_GROUP} /var/log/salt/syndic
+fi
 
 
 %posttrans api
@@ -578,7 +636,26 @@ if [ ! -e "/var/log/salt/api" ]; then
   touch /var/log/salt/api
   chmod 640 /var/log/salt/api
 fi
-chown %{_SALT_USER}:%{_SALT_GROUP} /var/log/salt/api
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    chown -R %{_MS_CUR_USER}:%{_MS_CUR_GROUP} /var/log/salt/api
+else
+    chown -R %{_SALT_USER}:%{_SALT_GROUP} /var/log/salt/api
+fi
+
+%posttrans minion
+if [ ! -e "/var/log/salt/minion" ]; then
+  touch /var/log/salt/minion
+  chmod 640 /var/log/salt/minion
+fi
+if [ ! -e "/var/log/salt/key" ]; then
+  touch /var/log/salt/key
+  chmod 640 /var/log/salt/key
+fi
+if [ $1 -gt 1 ] ; then
+    # Reset permissions to match previous installs - performing upgrade
+    chown -R %{_MN_CUR_USER}:%{_MN_CUR_GROUP} /etc/salt/pki/minion /etc/salt/minion.d /var/log/salt/minion /var/cache/salt/minion /var/run/salt/minion
+fi
 
 
 %preun
@@ -688,6 +765,74 @@ fi
 - Bump to ``jinja2==3.1.4`` due to https://github.com/advisories/GHSA-h75v-3vvj-5mfj [#66488](https://github.com/saltstack/salt/issues/66488)
 
 
+* Mon Jul 29 2024 Salt Project Packaging <saltproject-packaging@vmware.com> - 3006.9
+
+# Deprecated
+
+- Drop CentOS 7 support [#66623](https://github.com/saltstack/salt/issues/66623)
+- No longer build RPM packages with CentOS Stream 9 [#66624](https://github.com/saltstack/salt/issues/66624)
+
+# Fixed
+
+- Made slsutil.renderer work with salt-ssh [#50196](https://github.com/saltstack/salt/issues/50196)
+- Fixed defaults.merge is not available when using salt-ssh [#51605](https://github.com/saltstack/salt/issues/51605)
+- Fixed config.get does not support merge option with salt-ssh [#56441](https://github.com/saltstack/salt/issues/56441)
+- Update to include croniter in pkg requirements [#57649](https://github.com/saltstack/salt/issues/57649)
+- Fixed state.test does not work with salt-ssh [#61100](https://github.com/saltstack/salt/issues/61100)
+- Made slsutil.findup work with salt-ssh [#61143](https://github.com/saltstack/salt/issues/61143)
+- Fixes multiple issues with the cmd module on Windows. Scripts are called using
+  the ``-File`` parameter to the ``powershell.exe`` binary. ``CLIXML`` data in
+  stderr is now removed (only applies to encoded commands). Commands can now be
+  sent to ``cmd.powershell`` as a list. Makes sure JSON data returned is valid.
+  Strips whitespace from the return when using ``runas``. [#61166](https://github.com/saltstack/salt/issues/61166)
+- Fixed the win_lgpo_netsh salt util to handle non-English systems. This was a
+  rewrite to use PowerShell instead of netsh to make the changes on the system [#61534](https://github.com/saltstack/salt/issues/61534)
+- file.replace and file.search work properly with /proc files [#63102](https://github.com/saltstack/salt/issues/63102)
+- Fix utf8 handling in 'pass' renderer [#64300](https://github.com/saltstack/salt/issues/64300)
+- Fixed the incorrect "version argument will be ignored for multiple package targets" warning when using the pkgs argument with the yumpkg module. [#64563](https://github.com/saltstack/salt/issues/64563)
+- salt-cloud now honors the root_dir config setting for the log_file location, with fixes for root_dir locations on Windows. [#64728](https://github.com/saltstack/salt/issues/64728)
+- Fixed slsutil.update with salt-ssh during template rendering [#65067](https://github.com/saltstack/salt/issues/65067)
+- Fix config.items when called on minion [#65251](https://github.com/saltstack/salt/issues/65251)
+- Ensure that on rpm and deb systems, the user and group of an existing Salt install are maintained on upgrade [#65264](https://github.com/saltstack/salt/issues/65264)
+- Fix typo in nftables module to ensure unique nft family values [#65295](https://github.com/saltstack/salt/issues/65295)
+- pkg.installed state aggregate does not honor the requires requisite [#65304](https://github.com/saltstack/salt/issues/65304)
+- Added SSH wrapper for logmod [#65630](https://github.com/saltstack/salt/issues/65630)
+- Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM [#65816](https://github.com/saltstack/salt/issues/65816)
+- Corrected x509_v2 CRL creation `last_update` and `next_update` values when system timezone is not UTC [#65837](https://github.com/saltstack/salt/issues/65837)
+- Make sure the root minion process handles SIGUSR1 and emits a traceback like its child processes [#66095](https://github.com/saltstack/salt/issues/66095)
+- Replaced pyvenv with builtin venv for virtualenv_mod [#66132](https://github.com/saltstack/salt/issues/66132)
+- Made `file.managed` skip download of a remote source if the managed file already exists with the correct hash [#66342](https://github.com/saltstack/salt/issues/66342)
+- Fix win_task ExecutionTimeLimit and result/error code interpretation [#66347](https://github.com/saltstack/salt/issues/66347), [#66441](https://github.com/saltstack/salt/issues/66441)
+- Fixed nftables.build_rule breaks ipv6 rules by using the wrong syntax for source and destination addresses [#66382](https://github.com/saltstack/salt/issues/66382)
+- Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key [#66414](https://github.com/saltstack/salt/issues/66414)
+- Fixed parallel state execution with Salt-SSH [#66514](https://github.com/saltstack/salt/issues/66514)
+- Fix support for FIPS approved encryption and signing algorithms. [#66579](https://github.com/saltstack/salt/issues/66579)
+- Fix relative file_roots paths [#66588](https://github.com/saltstack/salt/issues/66588)
+- Fixed an issue with cmd.run with requirements when the shell is not the
+  default [#66596](https://github.com/saltstack/salt/issues/66596)
+- Fix RPM package provides [#66604](https://github.com/saltstack/salt/issues/66604)
+- Upgrade relenv to 0.16.1. This release fixes several package-install issues for salt-pip [#66632](https://github.com/saltstack/salt/issues/66632)
+- Upgrade relenv to 0.17.0 (https://github.com/saltstack/relenv/blob/v0.17.0/CHANGELOG.md) [#66663](https://github.com/saltstack/salt/issues/66663)
+- Upgrade dependencies due to security issues:
+  - pymysql>=1.1.1
+  - requests>=2.32.0
+  - docker>=7.1.0 [#66666](https://github.com/saltstack/salt/issues/66666)
+- Corrected a missed line in branch 3006.x when backporting from PR 61620 and 65044 [#66683](https://github.com/saltstack/salt/issues/66683)
+- Remove debug output from shell scripts for packaging [#66747](https://github.com/saltstack/salt/issues/66747)
+
+# Added
+
+- Add Ubuntu 24.04 support [#66180](https://github.com/saltstack/salt/issues/66180)
+- Add Fedora 40 support, replacing Fedora 39 [#66300](https://github.com/saltstack/salt/issues/66300)
+- Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9) [#66624](https://github.com/saltstack/salt/issues/66624)
+
+# Security
+
+- Bump to ``jinja2==3.1.4`` due to https://github.com/advisories/GHSA-h75v-3vvj-5mfj [#66488](https://github.com/saltstack/salt/issues/66488)
+- CVE-2024-37088 salt-call will fail with exit code 1 if bad pillar data is
+  encountered. [#66702](https://github.com/saltstack/salt/issues/66702)
+
+
 * Mon Apr 29 2024 Salt Project Packaging <saltproject-packaging@vmware.com> - 3006.8
 
 # Removed
@@ -950,6 +1095,7 @@ fi
 
 # Fixed
 
+- Fix issue with ownership on upgrade of master and minion files
 - Fix an issue with mac_shadow that was causing a command execution error when
   retrieving values that were not yet set. For example, retrieving last login
   before the user had logged in. [#34658](https://github.com/saltstack/salt/issues/34658)
diff --git a/pkg/windows/build_python.ps1 b/pkg/windows/build_python.ps1
index 9257ae79456..47b6c9a641a 100644
--- a/pkg/windows/build_python.ps1
+++ b/pkg/windows/build_python.ps1
@@ -176,13 +176,10 @@ $BUILD_DIR    = "$SCRIPT_DIR\buildenv"
 $RELENV_DIR   = "${env:LOCALAPPDATA}\relenv"
 $SYS_PY_BIN   = (python -c "import sys; print(sys.executable)")
 $BLD_PY_BIN   = "$BUILD_DIR\Scripts\python.exe"
-$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies"
 
 if ( $Architecture -eq "x64" ) {
-    $SALT_DEP_URL = "$SALT_DEP_URL/64"
     $ARCH         = "amd64"
 } else {
-    $SALT_DEP_URL = "$SALT_DEP_URL/32"
     $ARCH         = "x86"
 }
 
@@ -249,7 +246,7 @@ if ( $env:VIRTUAL_ENV ) {
 #-------------------------------------------------------------------------------
 # Installing Relenv
 #-------------------------------------------------------------------------------
-Write-Host "Installing Relenv: " -NoNewLine
+Write-Host "Installing Relenv ($RelenvVersion): " -NoNewLine
 pip install relenv==$RelenvVersion --disable-pip-version-check | Out-Null
 $output = pip list --disable-pip-version-check
 if ("relenv" -in $output.split()) {
diff --git a/pkg/windows/install_nsis.ps1 b/pkg/windows/install_nsis.ps1
index f30abdfbb9a..e225ab2c741 100644
--- a/pkg/windows/install_nsis.ps1
+++ b/pkg/windows/install_nsis.ps1
@@ -46,7 +46,7 @@ $NSIS_DIR     = "${env:ProgramFiles(x86)}\NSIS"
 $NSIS_PLUG_A  = "$NSIS_DIR\Plugins\x86-ansi"
 $NSIS_PLUG_U  = "$NSIS_DIR\Plugins\x86-unicode"
 $NSIS_LIB_DIR = "$NSIS_DIR\Include"
-$DEPS_URL = "https://repo.saltproject.io/windows/dependencies"
+$DEPS_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/nsis"
 
 #-------------------------------------------------------------------------------
 # Start the Script
@@ -68,7 +68,7 @@ if ( Test-Path -Path "$check_file" ) {
     Write-Result "Missing" -ForegroundColor Yellow
 
     Write-Host "Downloading NSIS: " -NoNewline
-    $url = "$DEPS_URL/nsis-3.03-setup.exe"
+    $url = "$DEPS_URL/nsis-3.10-setup.exe"
     $file = "$env:TEMP\install_nsis.exe"
     Invoke-WebRequest -Uri $url -OutFile "$file"
     if ( Test-Path -Path "$file" ) {
diff --git a/pkg/windows/install_salt.ps1 b/pkg/windows/install_salt.ps1
index 3be6e6f18da..670ea38a473 100644
--- a/pkg/windows/install_salt.ps1
+++ b/pkg/windows/install_salt.ps1
@@ -81,11 +81,6 @@ $ARCH          = $(. $PYTHON_BIN -c "import platform; print(platform.architectur
 # Script Variables
 $PROJECT_DIR     = $(git rev-parse --show-toplevel)
 $SALT_DEPS       = "$PROJECT_DIR\requirements\static\pkg\py$PY_VERSION\windows.txt"
-if ( $ARCH -eq "64bit" ) {
-    $SALT_DEP_URL   = "https://repo.saltproject.io/windows/dependencies/64"
-} else {
-    $SALT_DEP_URL   = "https://repo.saltproject.io/windows/dependencies/32"
-}
 
 if ( ! $SkipInstall ) {
   #-------------------------------------------------------------------------------
diff --git a/pkg/windows/msi/build_pkg.ps1 b/pkg/windows/msi/build_pkg.ps1
index 11c531590ea..6eb028f9d76 100644
--- a/pkg/windows/msi/build_pkg.ps1
+++ b/pkg/windows/msi/build_pkg.ps1
@@ -77,7 +77,7 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) {
 #-------------------------------------------------------------------------------
 
 $WEBCACHE_DIR   = "$env:TEMP\msi_build_cache_dir"
-$DEPS_URL       = "https://repo.saltproject.io/windows/dependencies"
+$DEPS_URL       = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/vcredist"
 $PROJECT_DIR    = $(git rev-parse --show-toplevel)
 $BUILD_DIR      = "$PROJECT_DIR\pkg\windows\build"
 $BUILDENV_DIR   = "$PROJECT_DIR\pkg\windows\buildenv"
@@ -90,6 +90,11 @@ $SCRIPT_DIR     = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").Direct
 $RUNTIME_DIR    = [System.Runtime.InteropServices.RuntimeEnvironment]::GetRuntimeDirectory()
 $CSC_BIN        = "$RUNTIME_DIR\csc.exe"
 
+[DateTime]$origin = "1970-01-01 00:00:00"
+$hash_time = $(git show -s --format=%at)
+$TIME_STAMP     = $origin.AddSeconds($hash_time)
+
+
 if ( $BUILD_ARCH -eq "64bit" ) {
     $BUILD_ARCH    = "AMD64"
 } else {
@@ -164,12 +169,12 @@ if ( ! "$env:WIX" ) {
 #-------------------------------------------------------------------------------
 
 $RUNTIMES = @(
-    ("Microsoft_VC143_CRT_x64.msm", "64", "F209B8906063A79B0DFFBB55D3C20AC0A676252DD4F5377CFCD148C409C859EC"),
-    ("Microsoft_VC143_CRT_x86.msm", "32", "B187BD73C7DC0BA353C5D3A6D9D4E63EF72435F8E68273466F30E5496C1A86F7")
+    ("Microsoft_VC143_CRT_x64.msm", "F209B8906063A79B0DFFBB55D3C20AC0A676252DD4F5377CFCD148C409C859EC"),
+    ("Microsoft_VC143_CRT_x86.msm", "B187BD73C7DC0BA353C5D3A6D9D4E63EF72435F8E68273466F30E5496C1A86F7")
 )
 $RUNTIMES | ForEach-Object {
-    $name, $arch, $hash = $_
-    VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$arch/$name" "$hash"
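+    # each $RUNTIMES entry is a (file name, sha256) pair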
+    $name, $hash = $_
+    VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$name" "$hash"
 }
 
 #-------------------------------------------------------------------------------
@@ -515,34 +520,23 @@ $remove | ForEach-Object {
 #-------------------------------------------------------------------------------
 # Set timestamps on Files
 #-------------------------------------------------------------------------------
-# We're doing this on the dlls that were created abive
-
-Write-Host "Getting commit time stamp: " -NoNewline
-[DateTime]$origin = "1970-01-01 00:00:00"
-$hash_time = $(git show -s --format=%at)
-$time_stamp = $origin.AddSeconds($hash_time)
-if ( $hash_time ) {
-    Write-Result "Success" -ForegroundColor Green
-} else {
-    Write-Result "Failed" -ForegroundColor Red
-    exit 1
-}
+# We're doing this on the dlls that were created above
 
 Write-Host "Setting time stamp on all files: " -NoNewline
 $found = Get-ChildItem -Path $BUILDENV_DIR -Recurse
 $found | ForEach-Object {
-    $_.CreationTime = $time_stamp
-    $_.LastAccessTime = $time_stamp
-    $_.LastWriteTime = $time_stamp
+    $_.CreationTime = $TIME_STAMP
+    $_.LastAccessTime = $TIME_STAMP
+    $_.LastWriteTime = $TIME_STAMP
 }
 Write-Result "Success" -ForegroundColor Green
 
 Write-Host "Setting time stamp on installer dlls: " -NoNewline
 $found = Get-ChildItem -Path $SCRIPT_DIR -Filter "*.dll" -Recurse
 $found | ForEach-Object {
-    $_.CreationTime = $time_stamp
-    $_.LastAccessTime = $time_stamp
-    $_.LastWriteTime = $time_stamp
+    $_.CreationTime = $TIME_STAMP
+    $_.LastAccessTime = $TIME_STAMP
+    $_.LastWriteTime = $TIME_STAMP
 }
 Write-Result "Success" -ForegroundColor Green
 
diff --git a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi
index d081dae10b4..a93fd009171 100644
--- a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi
+++ b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi
@@ -544,6 +544,11 @@ Section -install_vcredist_2022
     # Only install 64bit VCRedist on 64bit machines
     # Use RunningX64 here to get the Architecture for the system running the
     # installer.
+    # 2013 >= 21005
+    # 2015 >= 23026
+    # 2017 >= 25008
+    # 2019 >= 27508
+    # 2022 >= 30704
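+    # The "Bld" value under the Runtimes key holds the build number of the
+    # installed VC++ runtime; the check below only installs the
+    # redistributable when that build is older than the 2022 runtime (30704).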
     ${If} ${RunningX64}
         StrCpy $VcRedistName "vcredist_x64_2022"
         StrCpy $VcRedistReg "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64"
@@ -554,9 +559,10 @@ Section -install_vcredist_2022
 
     # Detecting VCRedist Installation
     detailPrint "Checking for $VcRedistName..."
-    ReadRegDword $0 HKLM $VcRedistReg "Installed"
-    StrCmp $0 "1" +2 0
-    Call InstallVCRedist
+    ReadRegDword $0 HKLM $VcRedistReg "Bld"
+    ${If} $0 < 30704
+        Call InstallVCRedist
+    ${EndIf}
 
 SectionEnd
 
diff --git a/pkg/windows/nsis/installer/helper_StrContains.nsh b/pkg/windows/nsis/installer/helper_StrContains.nsh
new file mode 100644
index 00000000000..bea8ac45146
--- /dev/null
+++ b/pkg/windows/nsis/installer/helper_StrContains.nsh
@@ -0,0 +1,52 @@
+#------------------------------------------------------------------------------
+# StrContains
+#
+# This function does a case-sensitive search for an occurrence of a substring
+# in a string. It returns the substring if it is found.
+# Otherwise it returns null ("").
+# Written by kenglish_hi
+# Adapted from StrReplace written by dandaman32
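+#
+# Usage (illustrative):
+#   ${StrContains} $0 "ssm" "C:\Salt\bin\ssm.exe"
+#   ; $0 now holds "ssm"; it would hold "" if the substring were absent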
+#------------------------------------------------------------------------------
+!define StrContains "!insertmacro StrContains"
+!macro StrContains OUT NEEDLE HAYSTACK
+    Push "${HAYSTACK}"
+    Push "${NEEDLE}"
+    Call StrContains
+    Pop  "${OUT}"
+!macroend
+Function StrContains
+
+    # Initialize variables
+    Var /GLOBAL STR_HAYSTACK
+    Var /GLOBAL STR_NEEDLE
+    Var /GLOBAL STR_CONTAINS_VAR_1
+    Var /GLOBAL STR_CONTAINS_VAR_2
+    Var /GLOBAL STR_CONTAINS_VAR_3
+    Var /GLOBAL STR_CONTAINS_VAR_4
+    Var /GLOBAL STR_RETURN_VAR
+
+    Exch $STR_NEEDLE
+    Exch 1
+    Exch $STR_HAYSTACK
+    # Uncomment to debug
+    #MessageBox MB_OK 'STR_NEEDLE = $STR_NEEDLE STR_HAYSTACK = $STR_HAYSTACK '
+    StrCpy $STR_RETURN_VAR ""
+    StrCpy $STR_CONTAINS_VAR_1 -1
+    StrLen $STR_CONTAINS_VAR_2 $STR_NEEDLE
+    StrLen $STR_CONTAINS_VAR_4 $STR_HAYSTACK
+
+    loop:
+        IntOp $STR_CONTAINS_VAR_1 $STR_CONTAINS_VAR_1 + 1
+        StrCpy $STR_CONTAINS_VAR_3 $STR_HAYSTACK $STR_CONTAINS_VAR_2 $STR_CONTAINS_VAR_1
+        StrCmp $STR_CONTAINS_VAR_3 $STR_NEEDLE found
+        StrCmp $STR_CONTAINS_VAR_1 $STR_CONTAINS_VAR_4 done
+        Goto loop
+
+    found:
+        StrCpy $STR_RETURN_VAR $STR_NEEDLE
+        Goto done
+
+    done:
+        Pop $STR_NEEDLE  # Prevent "invalid opcode" errors and keep the stack clean
+        Exch $STR_RETURN_VAR
+FunctionEnd
diff --git a/pkg/windows/nsis/tests/clean.ps1 b/pkg/windows/nsis/tests/clean.ps1
index 9cc7bb49e8b..a9065d41dcb 100644
--- a/pkg/windows/nsis/tests/clean.ps1
+++ b/pkg/windows/nsis/tests/clean.ps1
@@ -12,8 +12,17 @@ clean.ps1
 clean.ps1
 
 #>
+param(
+    [Parameter(Mandatory=$false)]
+    [Alias("c")]
+    # Don't prettify the output of Write-Result
+    [Switch] $CICD
+)
 
+#-------------------------------------------------------------------------------
 # Script Preferences
+#-------------------------------------------------------------------------------
+
 $ProgressPreference = "SilentlyContinue"
 $ErrorActionPreference = "Stop"
 
@@ -21,15 +30,22 @@ $ErrorActionPreference = "Stop"
 # Script Variables
 #-------------------------------------------------------------------------------
 
-$SCRIPT_DIR   = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
+$SCRIPT_DIR    = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
+$PROJECT_DIR   = $(git rev-parse --show-toplevel)
+$WINDOWS_DIR   = "$PROJECT_DIR\pkg\windows"
+$BUILDENV_DIR  = "$WINDOWS_DIR\buildenv"
 
 #-------------------------------------------------------------------------------
 # Script Functions
 #-------------------------------------------------------------------------------
 
 function Write-Result($result, $ForegroundColor="Green") {
-    $position = 80 - $result.Length - [System.Console]::CursorLeft
-    Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    if ( $CICD ) {
+        Write-Host $result -ForegroundColor $ForegroundColor
+    } else {
+        $position = 80 - $result.Length - [System.Console]::CursorLeft
+        Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    }
 }
 
 #-------------------------------------------------------------------------------
@@ -61,6 +77,51 @@ if ( Test-Path -Path "$SCRIPT_DIR\venv" ) {
         Write-Result "Success" -ForegroundColor Green
     }
 }
+
+#-------------------------------------------------------------------------------
+# Remove buildenv directory
+#-------------------------------------------------------------------------------
+if ( Test-Path -Path "$BUILDENV_DIR" ) {
+    Write-Host "Removing buildenv directory: " -NoNewline
+    Remove-Item -Path "$BUILDENV_DIR" -Recurse -Force
+    if ( Test-Path -Path "$BUILDENV_DIR" ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Make sure processes are not running
+#-------------------------------------------------------------------------------
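+# test-setup.exe is the test installer; the Un* names are presumably the
+# temporary copies the NSIS uninstaller runs as during these tests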
+$processes = "test-setup",
+             "Un",
+             "Un_A",
+             "Un_B",
+             "Un_C",
+             "Un_D",
+             "Un_E",
+             "Un_F",
+             "Un_G"
+$processes | ForEach-Object {
+    $proc = Get-Process -Name $_ -ErrorAction SilentlyContinue
+    if ( ($null -ne $proc) ) {
+        Write-Host "Killing $($_): " -NoNewline
+        $proc = Get-WmiObject -Class Win32_Process -Filter "Name='$_.exe'"
+        $proc.Terminate() *> $null
+        Start-Sleep -Seconds 5
+        $proc = Get-Process -Name $_ -ErrorAction SilentlyContinue
+        if ( ($null -eq $proc) ) {
+            Write-Result "Success" -ForegroundColor Green
+        } else {
+            Write-Result "Failed" -ForegroundColor Red
+            exit 1
+        }
+    }
+}
+
 #-------------------------------------------------------------------------------
 # Remove test-setup.exe
 #-------------------------------------------------------------------------------
@@ -75,6 +136,92 @@ if ( Test-Path -Path "$SCRIPT_DIR\test-setup.exe" ) {
     }
 }
 
+#-------------------------------------------------------------------------------
+# Remove custom_conf
+#-------------------------------------------------------------------------------
+if ( Test-Path -Path "$SCRIPT_DIR\custom_conf" ) {
+    Write-Host "Removing custom_conf: " -NoNewline
+    Remove-Item -Path "$SCRIPT_DIR\custom_conf" -Recurse -Force
+    if ( Test-Path -Path "$SCRIPT_DIR\custom_conf" ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Remove the salt-minion service
+#-------------------------------------------------------------------------------
+if ( $(Get-Service -Name salt-minion -ErrorAction SilentlyContinue).Name ) {
+    Write-Host "Removing salt-minion service: " -NoNewline
+    Stop-Service -Name salt-minion
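+    # Delete the service via WMI; the Remove-Service cmdlet is not available
+    # in Windows PowerShell 5.x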
+    $service = Get-WmiObject -Class Win32_Service -Filter "Name='salt-minion'"
+    $service.delete() *> $null
+    if ( $(Get-Service -Name salt-minion -ErrorAction SilentlyContinue).Name ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Remove Salt Project directory from Program Files
+#-------------------------------------------------------------------------------
+if ( Test-Path -Path "$env:ProgramFiles\Salt Project" ) {
+    Write-Host "Removing Salt Project from Program Files: " -NoNewline
+    Remove-Item -Path "$env:ProgramFiles\Salt Project" -Recurse -Force
+    if ( Test-Path -Path "$env:ProgramFiles\Salt Project" ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Remove Salt Project directory from ProgramData
+#-------------------------------------------------------------------------------
+if ( Test-Path -Path "$env:ProgramData\Salt Project" ) {
+    Write-Host "Removing Salt Project from ProgramData: " -NoNewline
+    Remove-Item -Path "$env:ProgramData\Salt Project" -Recurse -Force
+    if ( Test-Path -Path "$env:ProgramData\Salt Project" ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Remove Salt Project from Registry
+#-------------------------------------------------------------------------------
+if ( Test-Path -Path "HKLM:SOFTWARE\Salt Project" ) {
+    Write-Host "Removing Salt Project from Software: " -NoNewline
+    Remove-Item -Path "HKLM:SOFTWARE\Salt Project" -Recurse -Force
+    if ( Test-Path -Path "HKLM:SOFTWARE\Salt Project" ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Remove Salt Minion directory from Registry
+#-------------------------------------------------------------------------------
+if ( Test-Path -Path "HKLM:SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\Salt Minion" ) {
+    Write-Host "Removing Salt Minion from the Uninstall key: " -NoNewline
+    Remove-Item -Path "HKLM:SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\Salt Minion" -Recurse -Force
+    if ( Test-Path -Path "HKLM:SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\Salt Minion" ) {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    } else {
+        Write-Result "Success" -ForegroundColor Green
+    }
+}
+
 #-------------------------------------------------------------------------------
 # Script Completed
 #-------------------------------------------------------------------------------
diff --git a/pkg/windows/nsis/tests/config_tests/test_custom_full_path.py b/pkg/windows/nsis/tests/config_tests/test_custom_full_path.py
index 05f186f6c1b..8239b548f21 100644
--- a/pkg/windows/nsis/tests/config_tests/test_custom_full_path.py
+++ b/pkg/windows/nsis/tests/config_tests/test_custom_full_path.py
@@ -6,32 +6,38 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
-    pytest.helpers.custom_config()
-
-    full_path_conf = rf"{pytest.REPO_DIR}\custom_conf"
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", f"/custom-config={full_path_conf}"]
-    )
-    yield
+    full_path_conf = pytest.helpers.custom_config()
+    # Install salt with custom config
+    args = ["/S", f"/custom-config={full_path_conf}"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
-    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+    data_dir = pytest.DATA_DIR
+    data_dir_exists = os.path.exists(data_dir)
+    assert os.path.exists(rf"{data_dir}\conf\minion")
 
 
 def test_config_correct(install):
     # The config file should be the custom config, unchanged
-    with open(rf"{pytest.REPO_DIR}\custom_conf") as f:
+    script_dir = pytest.SCRIPT_DIR
+    script_dir_exists = os.path.exists(script_dir)
+    with open(rf"{script_dir}\custom_conf") as f:
         expected = f.readlines()
 
+    data_dir = pytest.DATA_DIR
+    data_dir_exists = os.path.exists(data_dir)
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
         result = f.readlines()
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_custom_master.py b/pkg/windows/nsis/tests/config_tests/test_custom_master.py
index d332fea1964..d3bcfa65fd5 100644
--- a/pkg/windows/nsis/tests/config_tests/test_custom_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_custom_master.py
@@ -6,19 +6,21 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/custom-config=custom_conf", "/master=cli_master"]
-    )
-    yield
+    # Install salt with custom config
+    args = ["/S", "/custom-config=custom_conf", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_custom_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_custom_master_minion.py
index 0a862c12c72..4f8e4891486 100644
--- a/pkg/windows/nsis/tests/config_tests/test_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_custom_master_minion.py
@@ -6,25 +6,26 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    # Install salt with custom config
+    args = [
+        "/S",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_custom_minion.py b/pkg/windows/nsis/tests/config_tests/test_custom_minion.py
index 4274defbb5d..36c6595e192 100644
--- a/pkg/windows/nsis/tests/config_tests/test_custom_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_custom_minion.py
@@ -6,24 +6,21 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    # Install salt with custom config
+    args = ["/S", "/custom-config=custom_conf", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_custom_rel_path.py b/pkg/windows/nsis/tests/config_tests/test_custom_rel_path.py
index a54969faf20..7948b04eeba 100644
--- a/pkg/windows/nsis/tests/config_tests/test_custom_rel_path.py
+++ b/pkg/windows/nsis/tests/config_tests/test_custom_rel_path.py
@@ -6,17 +6,21 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/custom-config=custom_conf"])
-    yield
+    # Install salt with custom config
+    args = ["/S", "/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
@@ -25,7 +29,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the custom config, unchanged
-    with open(rf"{pytest.REPO_DIR}\custom_conf") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_default.py b/pkg/windows/nsis/tests/config_tests/test_default.py
index 7681b44e52b..70ad7a1165b 100644
--- a/pkg/windows/nsis/tests/config_tests/test_default.py
+++ b/pkg/windows/nsis/tests/config_tests/test_default.py
@@ -6,13 +6,18 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command([pytest.INST_BIN, "/S"])
-    yield
+    args = ["/S"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
@@ -21,7 +26,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default config, unchanged
-    with open(rf"{pytest.REPO_DIR}\_files\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_default_master.py b/pkg/windows/nsis/tests/config_tests/test_default_master.py
index d978f7b206a..9bdfca3ffe9 100644
--- a/pkg/windows/nsis/tests/config_tests/test_default_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_default_master.py
@@ -6,13 +6,18 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/master=cli_master"])
-    yield
+    args = ["/S", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_default_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_default_master_minion.py
index 4a23c362995..b04896a000f 100644
--- a/pkg/windows/nsis/tests/config_tests/test_default_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_default_master_minion.py
@@ -6,15 +6,18 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/master=cli_master", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/S", "/master=cli_master", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_default_minion.py b/pkg/windows/nsis/tests/config_tests/test_default_minion.py
index c3f165a8ef9..7959c26e29b 100644
--- a/pkg/windows/nsis/tests/config_tests/test_default_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_default_minion.py
@@ -6,13 +6,18 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/minion-name=cli_minion"])
-    yield
+    args = ["/S", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing.py b/pkg/windows/nsis/tests/config_tests/test_existing.py
index 64baced527e..479792d0172 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing.py
@@ -6,17 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S"])
-    yield
+    args = ["/S"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_custom.py b/pkg/windows/nsis/tests/config_tests/test_existing_custom.py
index 6fb147739f2..46c552d0b9e 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_custom.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_custom.py
@@ -6,20 +6,22 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/custom-config=custom_conf"])
-    yield
+    args = ["/S", "/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
@@ -28,7 +30,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the custom config, unchanged
-    with open(rf"{pytest.REPO_DIR}\custom_conf") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_custom_master.py b/pkg/windows/nsis/tests/config_tests/test_existing_custom_master.py
index 78e80cea265..3eb53d45cf1 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_custom_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_custom_master.py
@@ -6,22 +6,22 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/custom-config=custom_conf", "/master=cli_master"]
-    )
-    yield
+    args = ["/S", "/custom-config=custom_conf", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_custom_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_existing_custom_master_minion.py
index 66e36c41262..bb6ad116ca0 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_custom_master_minion.py
@@ -6,28 +6,27 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_custom_minion.py b/pkg/windows/nsis/tests/config_tests/test_existing_custom_minion.py
index 545e8219537..a7f8e342452 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_custom_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_custom_minion.py
@@ -8,24 +8,20 @@ def install():
     pytest.helpers.clean_env()
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = ["/S", "/custom-config=custom_conf", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_default.py b/pkg/windows/nsis/tests/config_tests/test_existing_default.py
index aaaa2622e02..fba4e1a1c06 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_default.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_default.py
@@ -6,17 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/default-config"])
-    yield
+    args = ["/S", "/default-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
@@ -25,7 +28,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default config, unchanged
-    with open(rf"{pytest.REPO_DIR}\_files\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_default_master.py b/pkg/windows/nsis/tests/config_tests/test_existing_default_master.py
index 456080e4996..dc02133f8c1 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_default_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_default_master.py
@@ -6,19 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/default-config", "/master=cli_master"]
-    )
-    yield
+    args = ["/S", "/default-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_default_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_existing_default_master_minion.py
index 90d262e9b0e..74b95290a8b 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_default_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_default_master_minion.py
@@ -6,25 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/default-config",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = ["/S", "/default-config", "/master=cli_master", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_existing_default_minion.py b/pkg/windows/nsis/tests/config_tests/test_existing_default_minion.py
index 16e41035c66..cdc22e421e9 100644
--- a/pkg/windows/nsis/tests/config_tests/test_existing_default_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_existing_default_minion.py
@@ -6,19 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/default-config", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/S", "/default-config", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(rf"{pytest.INST_DIR}\ssm.exe")
+    # This will show the contents of the directory on failure
+    inst_dir = pytest.INST_DIR
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom.py
index a2fce64fe36..e11895122bc 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom.py
@@ -10,24 +10,19 @@ def inst_dir():
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-
+    pytest.helpers.clean_env()
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            f"/install-dir={inst_dir}",
-            "/custom-config=custom_conf",
-        ]
-    )
-    yield
+    args = ["/S", f"/install-dir={inst_dir}", "/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
@@ -37,7 +32,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the custom config, unchanged
-    with open(rf"{pytest.REPO_DIR}\custom_conf") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master.py
index d47abd1df83..267bf516eb9 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master.py
@@ -10,25 +10,24 @@ def inst_dir():
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-
+    pytest.helpers.clean_env()
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            f"/install-dir={inst_dir}",
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        f"/install-dir={inst_dir}",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master_minion.py
index bf039af4c20..a6919a822c7 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_master_minion.py
@@ -10,26 +10,25 @@ def inst_dir():
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-
+    pytest.helpers.clean_env()
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            f"/install-dir={inst_dir}",
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        f"/install-dir={inst_dir}",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_minion.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_minion.py
index fcdf2146e0a..7f46ea88def 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_custom_minion.py
@@ -10,25 +10,24 @@ def inst_dir():
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-
+    pytest.helpers.clean_env()
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            f"/install-dir={inst_dir}",
-            "/custom-config=custom_conf",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        f"/install-dir={inst_dir}",
+        "/custom-config=custom_conf",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_default.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_default.py
index ac3c403e2e6..f5674685243 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_default.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_default.py
@@ -10,13 +10,17 @@ def inst_dir():
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", f"/install-dir={inst_dir}"])
-    yield
+    pytest.helpers.clean_env()
+    args = ["/S", f"/install-dir={inst_dir}"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
@@ -26,7 +30,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default config, unchanged
-    with open(rf"{pytest.REPO_DIR}\_files\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master.py
index 2ac36f87e27..2de9fe76a95 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master.py
@@ -5,20 +5,22 @@ import pytest
 
 @pytest.fixture(scope="module")
 def inst_dir():
-    return r"C:\custom_location"
+    return "C:\\custom_location"
 
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", f"/install-dir={inst_dir}", "/master=cli_master"]
-    )
-    yield
+    pytest.helpers.clean_env()
+    args = ["/S", f"/install-dir={inst_dir}", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master_minion.py
index a2996764dd6..9ec9d329bc2 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_default_master_minion.py
@@ -5,26 +5,27 @@ import pytest
 
 @pytest.fixture(scope="module")
 def inst_dir():
-    return r"C:\custom_location"
+    return "C:\\custom_location"
 
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            f"/install-dir={inst_dir}",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    pytest.helpers.clean_env()
+    args = [
+        "/S",
+        f"/install-dir={inst_dir}",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_default_minion.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_default_minion.py
index 6f8f9dc8590..dbe73e7e24c 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_default_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_default_minion.py
@@ -5,20 +5,22 @@ import pytest
 
 @pytest.fixture(scope="module")
 def inst_dir():
-    return r"C:\custom_location"
+    return "C:\\custom_location"
 
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", f"/install-dir={inst_dir}", "/minion-name=cli_minion"]
-    )
-    yield {"inst_dir": inst_dir}
+    pytest.helpers.clean_env()
+    args = ["/S", f"/install-dir={inst_dir}", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_existing.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_existing.py
index f28a61de692..f785dc2d94c 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_existing.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_existing.py
@@ -5,22 +5,24 @@ import pytest
 
 @pytest.fixture(scope="module")
 def inst_dir():
-    return r"C:\custom_location"
+    return "C:\\custom_location"
 
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
-    pytest.helpers.clean_env(inst_dir)
-
+    pytest.helpers.clean_env()
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", f"/install-dir={inst_dir}"])
-    yield
+    args = ["/S", f"/install-dir={inst_dir}"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env(inst_dir)
 
 
 def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
     assert os.path.exists(rf"{inst_dir}\ssm.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_install_dir_move_old_install.py b/pkg/windows/nsis/tests/config_tests/test_install_dir_move_old_install.py
index a556a2c3a98..b2423996600 100644
--- a/pkg/windows/nsis/tests/config_tests/test_install_dir_move_old_install.py
+++ b/pkg/windows/nsis/tests/config_tests/test_install_dir_move_old_install.py
@@ -5,24 +5,23 @@ import pytest
 
 @pytest.fixture(scope="module")
 def inst_dir():
-    return r"C:\custom_location"
+    return "C:\\custom_location"
 
 
 @pytest.fixture(scope="module")
 def install(inst_dir):
     pytest.helpers.clean_env()
-
     # Create old install
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", f"/install-dir={inst_dir}", "/move-config"]
-    )
-    yield
+    args = ["/S", f"/install-dir={inst_dir}", "/move-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
     # Apparently we don't move the binaries even if they pass install-dir
     # TODO: Decide if this is expected behavior
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install.py b/pkg/windows/nsis/tests/config_tests/test_old_install.py
index f4ac2f8204f..2db0aa56e4e 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install.py
@@ -6,20 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S"])
-    yield
+    args = ["/S"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries on upgrade
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_custom.py b/pkg/windows/nsis/tests/config_tests/test_old_install_custom.py
index 4e3dfdf3f78..3c792052acc 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_custom.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_custom.py
@@ -6,23 +6,22 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/custom-config=custom_conf"])
-    yield
+    args = ["/S", "/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries on upgrade
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
@@ -32,7 +31,7 @@ def test_config_present_old_location(install):
 
 def test_config_correct(install):
     # The config file should be the custom config, unchanged
-    with open(rf"{pytest.REPO_DIR}\custom_conf") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master.py b/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master.py
index 441221e9841..094642bf899 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master.py
@@ -6,25 +6,22 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/custom-config=custom_conf", "/master=cli_master"]
-    )
-    yield
+    args = ["/S", "/custom-config=custom_conf", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries on upgrade
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master_minion.py
index 0eb22436351..bee83a7ee9e 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_custom_master_minion.py
@@ -6,31 +6,27 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries on upgrade
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_custom_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_custom_minion.py
index 0f265fd4fbe..e542f799a4c 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_custom_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_custom_minion.py
@@ -6,25 +6,22 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/custom-config=custom_conf", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/S", "/custom-config=custom_conf", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries on upgrade
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_default.py b/pkg/windows/nsis/tests/config_tests/test_old_install_default.py
index d3aed4f5fb1..e5a85d6ceb5 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_default.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_default.py
@@ -6,20 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
-    # Create old config
+    # Create old install
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/default-config"])
-    yield
+    args = ["/S", "/default-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
@@ -29,7 +29,7 @@ def test_config_present_old_location(install):
 
 def test_config_correct(install):
     # The config file should be the default config, unchanged
-    with open(rf"{pytest.REPO_DIR}\_files\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_default_master.py b/pkg/windows/nsis/tests/config_tests/test_old_install_default_master.py
index e0546759046..998dc23c57f 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_default_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_default_master.py
@@ -6,22 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/default-config", "/master=cli_master"]
-    )
-    yield
+    args = ["/S", "/default-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_default_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_default_master_minion.py
index c4f418a4a19..a08306c7911 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_default_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_default_master_minion.py
@@ -6,28 +6,25 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/default-config",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/default-config",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_default_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_default_minion.py
index 563b8125b48..5365800667e 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_default_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_default_minion.py
@@ -6,22 +6,20 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/default-config", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/S", "/default-config", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move.py
index d5328d82b3e..aaf1a10f9f3 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move.py
@@ -6,24 +6,24 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/S", "/move-config"])
-    yield
+    args = ["/S", "/move-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom.py
index eadd2fd10fa..11ef683ea3f 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom.py
@@ -6,35 +6,32 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/custom-config=custom_conf", "/move-config"]
-    )
-    yield
+    args = ["/S", "/custom-config=custom_conf", "/move-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
 def test_config_correct(install):
     # The config file should be the custom config in the new location, unchanged
-    with open(rf"{pytest.REPO_DIR}\custom_conf") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master.py
index 5d0aa67d52a..9698bcdce4a 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master.py
@@ -6,35 +6,26 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/move-config",
-            "/master=cli_master",
-        ]
-    )
-    yield
+    args = ["/S", "/custom-config=custom_conf", "/move-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master_minion.py
index da3e2916eb6..01ab2117630 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_master_minion.py
@@ -6,36 +6,32 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/move-config",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/custom-config=custom_conf",
+        "/move-config",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_minion.py
index 621c840ccd0..e3c4ed35b15 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_custom_minion.py
@@ -6,35 +6,31 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/custom-config=custom_conf",
-            "/move-config",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/custom-config=custom_conf",
+        "/move-config",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default.py
index 216a0b40c50..b0151e83b8f 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default.py
@@ -6,32 +6,30 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/move-config", "/default-config"]
-    )
-    yield
+    args = ["/S", "/move-config", "/default-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
 def test_config_correct(install):
     # The config file should be the default config in the new location, unchanged
-    with open(rf"{pytest.REPO_DIR}\_files\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master.py
index 1d2d2a158c5..73c09cf6851 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master.py
@@ -6,26 +6,24 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/S", "/default-config", "/move-config", "/master=cli_master"]
-    )
-    yield
+    args = ["/S", "/default-config", "/move-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master_minion.py
index a46e1e5243d..95fd52594e1 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_master_minion.py
@@ -6,33 +6,30 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/default-config",
-            "/move-config",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/default-config",
+        "/move-config",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_minion.py b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_minion.py
index 2fdada9f21f..3691b2366d3 100644
--- a/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_minion.py
+++ b/pkg/windows/nsis/tests/config_tests/test_old_install_move_default_minion.py
@@ -6,32 +6,29 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create old config
     pytest.helpers.old_install()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/S",
-            "/default-config",
-            "/move-config",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/S",
+        "/default-config",
+        "/move-config",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
-def test_ssm_present_old_location(install):
-    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
-
-
 def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
     assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
 
 
-def test_config_present_old_location(install):
+def test_config_present_new_location(install):
     assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
 
 
diff --git a/pkg/windows/nsis/tests/conftest.py b/pkg/windows/nsis/tests/conftest.py
index 5fe43bdb428..be44a7c2221 100644
--- a/pkg/windows/nsis/tests/conftest.py
+++ b/pkg/windows/nsis/tests/conftest.py
@@ -1,4 +1,5 @@
 import os
+import re
 import shutil
 import subprocess
 import time
@@ -49,16 +50,33 @@ INST_DIR = r"C:\Program Files\Salt Project\Salt"
 DATA_DIR = r"C:\ProgramData\Salt Project\Salt"
 SYSTEM_DRIVE = os.environ.get("SystemDrive")
 OLD_DIR = f"{SYSTEM_DRIVE}\\salt"
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+INST_BIN = rf"{SCRIPT_DIR}\test-setup.exe"
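+# Installer/uninstaller process names the tests may need to wait on. NSIS
+# uninstallers run as a copy of themselves named Un.exe or Un_*.exe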
+PROCESSES = [
+    os.path.basename(INST_BIN),
+    "uninst.exe",
+    "Un.exe",
+    "Un_A.exe",
+    "Un_B.exe",
+    "Un_C.exe",
+    "Un_D.exe",
+    "Un_D.exe",
+    "Un_F.exe",
+    "Un_G.exe",
+]
 
 
 def reg_key_exists(hive=winreg.HKEY_LOCAL_MACHINE, key=None):
+    """
+    Helper function to determine if a registry key exists. It does this by
+    opening the key. If the open succeeds, the key exists. Otherwise an
+    exception is raised, which means the key does not exist.
+    """
     try:
         with winreg.OpenKey(hive, key, 0, winreg.KEY_READ):
-            exists = True
+            return True
     except:
-        exists = False
-
-    return exists
+        return False
 
 
 def delete_key(hive=winreg.HKEY_LOCAL_MACHINE, key=None):
@@ -66,60 +84,97 @@ def delete_key(hive=winreg.HKEY_LOCAL_MACHINE, key=None):
         parent, _, base = key.rpartition("\\")
         with winreg.OpenKey(hive, parent, 0, winreg.KEY_ALL_ACCESS) as reg:
             winreg.DeleteKey(reg, base)
+    assert not reg_key_exists(hive=hive, key=key)
 
 
 def pytest_configure():
     pytest.DATA_DIR = DATA_DIR
     pytest.INST_DIR = INST_DIR
-    pytest.REPO_DIR = REPO_DIR
     pytest.INST_BIN = INST_BIN
     pytest.OLD_DIR = OLD_DIR
+    pytest.SCRIPT_DIR = SCRIPT_DIR
     pytest.EXISTING_CONTENT = existing_content
     pytest.CUSTOM_CONTENT = custom_content
     pytest.OLD_CONTENT = old_content
 
 
-@pytest.helpers.register
-def clean_env(inst_dir=INST_DIR):
-    # Run uninstaller
-    for uninst_bin in [f"{inst_dir}\\uninst.exe", f"{OLD_DIR}\\uninst.exe"]:
-        if os.path.exists(uninst_bin):
-            run_command([uninst_bin, "/S", "/delete-root-dir", "/delete-install-dir"])
-            # This is needed to avoid a race condition where the uninstall is completing
-            start_time = time.time()
-            while "Un_A.exe" in (p.name() for p in psutil.process_iter()):
-                # Sometimes the Uninstall binary hangs... we'll kill it after 10 seconds
-                if (time.time() - start_time) > 10:
-                    for proc in psutil.process_iter():
-                        if proc.name() == "Un_A.exe":
-                            proc.kill()
-                time.sleep(0.1)
-
-    # This is needed to avoid a race condition where the installer isn't closed
-    start_time = time.time()
-    while os.path.basename(INST_BIN) in (p.name() for p in psutil.process_iter()):
-        if (time.time() - start_time) > 10:
-            # If it's not dead after 10 seconds, kill it
-            for proc in psutil.process_iter():
-                if proc.name() == os.path.basename(INST_BIN):
-                    proc.kill()
-                time.sleep(0.1)
-
+def clean_fragments(inst_dir=INST_DIR):
     # Remove root_dir
     if os.path.exists(DATA_DIR):
         shutil.rmtree(DATA_DIR)
+    assert not os.path.exists(DATA_DIR)
+
     # Remove install dir
     if os.path.exists(inst_dir):
         shutil.rmtree(inst_dir)
+    assert not os.path.exists(inst_dir)
+
     # Remove old salt dir (C:\salt)
     if os.path.exists(OLD_DIR):
         shutil.rmtree(OLD_DIR)
+    assert not os.path.exists(OLD_DIR)
+
     # Remove custom config
-    if os.path.exists(rf"{REPO_DIR}\custom_conf"):
-        os.remove(rf"{REPO_DIR}\custom_conf")
+    if os.path.exists(rf"{SCRIPT_DIR}\custom_conf"):
+        os.remove(rf"{SCRIPT_DIR}\custom_conf")
+    assert not os.path.exists(rf"{SCRIPT_DIR}\custom_conf")
+
     # Remove registry entries
     delete_key(key="SOFTWARE\\Salt Project\\Salt")
+    assert not reg_key_exists(
+        hive=winreg.HKEY_LOCAL_MACHINE, key="SOFTWARE\\Salt Project\\Salt"
+    )
+
     delete_key(key="SOFTWARE\\Salt Project")
+    assert not reg_key_exists(
+        hive=winreg.HKEY_LOCAL_MACHINE, key="SOFTWARE\\Salt Project"
+    )
+
+    return True
+
+
+@pytest.helpers.register
+def clean_env(inst_dir=INST_DIR, timeout=300):
+    # Let's make sure none of the install/uninstall processes are running
+    for proc in PROCESSES:
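+        # A process can exit while we iterate, raising NoSuchProcess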
+        try:
+            assert proc not in (p.name() for p in psutil.process_iter())
+        except psutil.NoSuchProcess:
+            continue
+
+    # Uninstall any existing installation by running its uninstaller,
+    # if one is present
+    for uninst_bin in [f"{inst_dir}\\uninst.exe", f"{OLD_DIR}\\uninst.exe"]:
+        if os.path.exists(uninst_bin):
+            install_dir = os.path.dirname(uninst_bin)
+            cmd = [f'"{uninst_bin}"', "/S", "/delete-root-dir", "/delete-install-dir"]
+            run_command(cmd)
+
+            # Uninst.exe launches a 2nd binary (Un.exe or Un_*.exe)
+            # Let's get the name of the process
+            proc_name = ""
+            for proc in PROCESSES:
+                try:
+                    if proc in (p.name() for p in psutil.process_iter()):
+                        proc_name = proc
+                except psutil.NoSuchProcess:
+                    continue
+
+            # We need to give the process time to exit
+            if proc_name:
+                elapsed_time = 0
+                while elapsed_time < timeout:
+                    try:
+                        if proc_name not in (p.name() for p in psutil.process_iter()):
+                            break
+                    except psutil.NoSuchProcess:
+                        # A process exited mid-iteration; fall through to the
+                        # sleep below instead of busy-looping
+                        pass
+                    elapsed_time += 0.1
+                    time.sleep(0.1)
+
+            assert clean_fragments(inst_dir=install_dir)
+
+    return True
 
 
 @pytest.helpers.register
@@ -134,12 +189,15 @@ def existing_config():
 
 @pytest.helpers.register
 def custom_config():
-    if os.path.exists(rf"{REPO_DIR}\custom_conf"):
-        os.remove(rf"{REPO_DIR}\custom_conf")
+    conf_file = rf"{SCRIPT_DIR}\custom_conf"
+    if os.path.exists(conf_file):
+        os.remove(conf_file)
     # Create a custom config
-    with open(rf"{REPO_DIR}\custom_conf", "w") as f:
+    with open(conf_file, "w") as f:
         # \n characters are converted to os.linesep
         f.writelines(custom_content)
+    assert os.path.exists(conf_file)
+    return conf_file
 
 
 @pytest.helpers.register
@@ -158,25 +216,78 @@ def old_install():
     with open(f"{OLD_DIR}\\conf\\minion", "w") as f:
         # \n characters are converted to os.linesep
         f.writelines(old_content)
-    while not (os.path.exists(f"{OLD_DIR}\\bin\\python.exe")):
-        time.sleep(0.1)
-    while not (os.path.exists(f"{OLD_DIR}\\bin\\ssm.exe")):
-        time.sleep(0.1)
-    while not (os.path.exists(f"{OLD_DIR}\\conf\\minion")):
-        time.sleep(0.1)
+
     assert os.path.exists(f"{OLD_DIR}\\bin\\python.exe")
     assert os.path.exists(f"{OLD_DIR}\\bin\\ssm.exe")
     assert os.path.exists(f"{OLD_DIR}\\conf\\minion")
 
 
 @pytest.helpers.register
-def run_command(cmd):
-    result = subprocess.run(cmd, capture_output=True, text=True)
-    return result.stdout.strip().replace("/", "\\")
+def install_salt(args):
+    """
+    Installs salt with the passed arguments and waits for the installer to exit
+    """
+    cmd = [f'"{INST_BIN}"']
+    if isinstance(args, str):
+        cmd.append(args)
+    elif isinstance(args, list):
+        cmd.extend(args)
+    else:
+        raise TypeError(f"Invalid args format: {args}")
+    run_command(cmd)
+
+    # Let's make sure the installer process is no longer running
+    try:
+        assert os.path.basename(INST_BIN) not in (
+            p.name() for p in psutil.process_iter()
+        )
+    except psutil.NoSuchProcess:
+        pass
 
 
-# These are at the bottom because they depend on some of the functions
-REPO_DIR = run_command(["git", "rev-parse", "--show-toplevel"])
-REPO_DIR = rf"{REPO_DIR}\pkg\windows\nsis\tests"
-os.chdir(REPO_DIR)
-INST_BIN = rf"{REPO_DIR}\test-setup.exe"
+def is_file_locked(path):
+    """
+    Check whether a file is locked by attempting to open it
+    """
+    if not os.path.exists(path):
+        return False
+    try:
+        with open(path):
+            pass
+    except OSError:
+        return True
+    return False
+
+
+@pytest.helpers.register
+def run_command(cmd_args, timeout=300):
+    if isinstance(cmd_args, list):
+        cmd_args = " ".join(cmd_args)
+
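+    # Assumes the binary is the first double-quoted token in the command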
+    bin_file = re.findall(r'"(.*?)"', cmd_args)[0]
+
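+    # Wait for the binary to be unlocked (a previous process may still hold it)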
+    elapsed_time = 0
+    while (
+        os.path.exists(bin_file) and is_file_locked(bin_file) and elapsed_time < timeout
+    ):
+        elapsed_time += 0.1
+        time.sleep(0.1)
+
+    proc = subprocess.Popen(cmd_args, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+
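+    # Wait again until the binary is released, or the timeout expires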
+    elapsed_time = 0
+    while (
+        os.path.exists(bin_file) and is_file_locked(bin_file) and elapsed_time < timeout
+    ):
+        elapsed_time += 0.1
+        time.sleep(0.1)
+
+    try:
+        out, err = proc.communicate(timeout=timeout)
+        assert proc.returncode == 0
+    except subprocess.TimeoutExpired:
+        # This hides the hung installer/uninstaller problem
+        proc.kill()
+        out = "process killed"
+
+    return out
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_full_path.py b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_full_path.py
index 6c543c1ccf9..1c2c160651f 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_full_path.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_full_path.py
@@ -6,14 +6,12 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
-    pytest.helpers.custom_config()
-
-    full_path_conf = f"{pytest.REPO_DIR}\\custom_conf"
-
-    pytest.helpers.run_command([pytest.INST_BIN, f"/custom-config={full_path_conf}"])
-    yield
+    full_path_conf = pytest.helpers.custom_config()
+    # Install salt passing custom-config
+    args = [f"/custom-config={full_path_conf}"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
@@ -27,7 +25,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default, unchanged
-    with open(f"{pytest.REPO_DIR}\\custom_conf") as f:
+    with open(f"{pytest.SCRIPT_DIR}\\custom_conf") as f:
         expected = f.readlines()
 
     with open(f"{pytest.DATA_DIR}\\conf\\minion") as f:
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master.py
index e95df53ff11..a93b7b68dbe 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master.py
@@ -6,14 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/custom-config=custom_conf", "/master=cli_master"]
-    )
-    yield
+    args = ["/custom-config=custom_conf", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master_minion.py
index b6fbc71d778..5ee86d2bb41 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_master_minion.py
@@ -6,19 +6,15 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_minion.py
index c90bfb159cf..2a91d9fedf0 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_minion.py
@@ -6,14 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/custom-config=custom_conf", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/custom-config=custom_conf", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_rel_path.py b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_rel_path.py
index 61458486a43..bbb949dc2d3 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_custom_rel_path.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_custom_rel_path.py
@@ -6,12 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/custom-config=custom_conf"])
-    yield
+    args = ["/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
@@ -25,7 +24,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default, unchanged
-    with open(f"{pytest.REPO_DIR}\\custom_conf") as f:
+    with open(f"{pytest.SCRIPT_DIR}\\custom_conf") as f:
         expected = f.readlines()
 
     with open(f"{pytest.DATA_DIR}\\conf\\minion") as f:
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_default.py b/pkg/windows/nsis/tests/manual_tests/test_manual_default.py
index 3ad8cbde51f..b3cd13cc038 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_default.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_default.py
@@ -6,8 +6,9 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command([pytest.INST_BIN])
-    yield
+    args = []
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
@@ -21,7 +22,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default, unchanged
-    with open(f"{pytest.REPO_DIR}\\tests\\_files\\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(f"{pytest.DATA_DIR}\\conf\\minion") as f:
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_default_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_default_master.py
index 1b819c7db7d..5935981d362 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_default_master.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_default_master.py
@@ -6,13 +6,14 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command([pytest.INST_BIN, "/master=cli_master"])
-    yield
+    args = ["/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
 def test_binaries_present(install):
-    assert os.path.exists(f"{pytest.INST_DIR}\\bsm.exe")
+    assert os.path.exists(f"{pytest.INST_DIR}\\ssm.exe")
 
 
 def test_config_present(install):
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_default_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_default_master_minion.py
index 101238d4623..acd4d313b4f 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_default_master_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_default_master_minion.py
@@ -6,10 +6,9 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/master=cli_master", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/master=cli_master", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_default_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_default_minion.py
index 538b6328968..79d9e452dc1 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_default_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_default_minion.py
@@ -6,8 +6,9 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-    pytest.helpers.run_command([pytest.INST_BIN, "/minion-name=cli_minion"])
-    yield
+    args = ["/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing.py
index 926637081c0..a150adb48a8 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing.py
@@ -6,12 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN])
-    yield
+    args = []
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom.py
index 6d164e98a9c..af6292cf282 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom.py
@@ -6,15 +6,13 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/custom-config=custom_conf"])
-    yield
+    args = ["/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
@@ -28,7 +26,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default, unchanged
-    with open(f"{pytest.REPO_DIR}\\custom_conf") as f:
+    with open(f"{pytest.SCRIPT_DIR}\\custom_conf") as f:
         expected = f.readlines()
 
     with open(f"{pytest.DATA_DIR}\\conf\\minion") as f:
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master.py
index 26d182ca95b..252c5801ff4 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master.py
@@ -6,17 +6,13 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/custom-config=custom_conf", "/master=cli_master"]
-    )
-    yield
+    args = ["/custom-config=custom_conf", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master_minion.py
index 2c8ac4aafee..f2e079f7439 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_master_minion.py
@@ -6,22 +6,17 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/custom-config=custom_conf",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = [
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_minion.py
index 092857b39b9..b31ad3db90b 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_custom_minion.py
@@ -8,14 +8,11 @@ def install():
     pytest.helpers.clean_env()
     # Create an existing config
     pytest.helpers.existing_config()
-
     # Create a custom config
     pytest.helpers.custom_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/custom-config=custom_conf", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/custom-config=custom_conf", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default.py
index 1007fcb97d4..515b835394c 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default.py
@@ -6,12 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command([pytest.INST_BIN, "/default-config"])
-    yield
+    args = ["/default-config"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
@@ -25,7 +24,7 @@ def test_config_present(install):
 
 def test_config_correct(install):
     # The config file should be the default, unchanged
-    with open(f"{pytest.REPO_DIR}\\tests\\_files\\minion") as f:
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
         expected = f.readlines()
 
     with open(f"{pytest.DATA_DIR}\\conf\\minion") as f:
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master.py
index 81d66801cb5..f054cbfbe9c 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master.py
@@ -6,14 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/default-config", "/master=cli_master"]
-    )
-    yield
+    args = ["/default-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manulal_existing_default_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master_minion.py
similarity index 83%
rename from pkg/windows/nsis/tests/manual_tests/test_manulal_existing_default_master_minion.py
rename to pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master_minion.py
index 65c7dc8b0df..ee5703740c5 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manulal_existing_default_master_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_master_minion.py
@@ -6,19 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command(
-        [
-            pytest.INST_BIN,
-            "/default-config",
-            "/master=cli_master",
-            "/minion-name=cli_minion",
-        ]
-    )
-    yield
+    args = ["/default-config", "/master=cli_master", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_minion.py
index 09db2177d5f..a5d0f4f6f3b 100644
--- a/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_minion.py
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_existing_default_minion.py
@@ -6,14 +6,11 @@ import pytest
 @pytest.fixture(scope="module")
 def install():
     pytest.helpers.clean_env()
-
     # Create an existing config
     pytest.helpers.existing_config()
-
-    pytest.helpers.run_command(
-        [pytest.INST_BIN, "/default-config", "/minion-name=cli_minion"]
-    )
-    yield
+    args = ["/default-config", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
     pytest.helpers.clean_env()
 
 
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom.py
new file mode 100644
index 00000000000..cd8213b9f39
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom.py
@@ -0,0 +1,41 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = [f"/install-dir={inst_dir}", "/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config, unchanged
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
+        expected = f.readlines()
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_master.py
new file mode 100644
index 00000000000..1ddbd4948af
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_master.py
@@ -0,0 +1,53 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = [
+        f"/install-dir={inst_dir}",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config with only master set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: custom_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_master_minion.py
new file mode 100644
index 00000000000..eadc478ad40
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_master_minion.py
@@ -0,0 +1,54 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = [
+        f"/install-dir={inst_dir}",
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config with master and minion set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_minion.py
new file mode 100644
index 00000000000..f896ed8c63c
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_custom_minion.py
@@ -0,0 +1,53 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = [
+        f"/install-dir={inst_dir}",
+        "/custom-config=custom_conf",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config with only minion set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: custom_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default.py
new file mode 100644
index 00000000000..c42bb50e02e
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default.py
@@ -0,0 +1,39 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    args = [f"/install-dir={inst_dir}"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config, unchanged
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
+        expected = f.readlines()
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_master.py
new file mode 100644
index 00000000000..291d110f8d9
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_master.py
@@ -0,0 +1,47 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    args = [f"/install-dir={inst_dir}", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config with only master set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Default config from test suite line 2/6\n",
+        "#id:\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_master_minion.py
new file mode 100644
index 00000000000..44c364b9615
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_master_minion.py
@@ -0,0 +1,47 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    args = [f"/install-dir={inst_dir}", "/master=cli_master", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config with master and minion set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Default config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_minion.py
new file mode 100644
index 00000000000..b964852d9dc
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_default_minion.py
@@ -0,0 +1,47 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    args = [f"/install-dir={inst_dir}", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config with just the minion set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "#master: salt\n",
+        "# Default config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_existing.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_existing.py
new file mode 100644
index 00000000000..4f61c24eae3
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_existing.py
@@ -0,0 +1,40 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    # Create an existing config
+    pytest.helpers.existing_config()
+    args = [f"/install-dir={inst_dir}"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env(inst_dir)
+
+
+def test_binaries_present(install, inst_dir):
+    # This will show the contents of the directory on failure
+    inst_dir_exists = os.path.exists(inst_dir)
+    dir_contents = os.listdir(inst_dir)
+    assert os.path.exists(rf"{inst_dir}\ssm.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the existing config, unchanged
+    expected = pytest.EXISTING_CONTENT
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_move_old_install.py b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_move_old_install.py
new file mode 100644
index 00000000000..0ccd54aab99
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_install_dir_move_old_install.py
@@ -0,0 +1,42 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def inst_dir():
+    return "C:\\custom_location"
+
+
+@pytest.fixture(scope="module")
+def install(inst_dir):
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = [f"/install-dir={inst_dir}", "/move-config"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the existing config in the new location, unchanged
+    expected = pytest.OLD_CONTENT
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install.py
new file mode 100644
index 00000000000..6afcb0af507
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install.py
@@ -0,0 +1,37 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = []
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the old existing config, unchanged
+    expected = pytest.OLD_CONTENT
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom.py
new file mode 100644
index 00000000000..037a4fa2b44
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom.py
@@ -0,0 +1,40 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = ["/custom-config=custom_conf"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config, unchanged
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
+        expected = f.readlines()
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_master.py
new file mode 100644
index 00000000000..765d378307e
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_master.py
@@ -0,0 +1,48 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = ["/custom-config=custom_conf", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config with only master set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: custom_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_master_minion.py
new file mode 100644
index 00000000000..b247fd33277
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_master_minion.py
@@ -0,0 +1,52 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = [
+        "/custom-config=custom_conf",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config with master and minion set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_minion.py
new file mode 100644
index 00000000000..23fce833cea
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_custom_minion.py
@@ -0,0 +1,48 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = ["/custom-config=custom_conf", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config with only minion set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: custom_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default.py
new file mode 100644
index 00000000000..0f782538e3a
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default.py
@@ -0,0 +1,38 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = ["/default-config"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config, unchanged
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
+        expected = f.readlines()
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_master.py
new file mode 100644
index 00000000000..77085dc6403
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_master.py
@@ -0,0 +1,46 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = ["/default-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config with only master set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Default config from test suite line 2/6\n",
+        "#id:\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_master_minion.py
new file mode 100644
index 00000000000..0b00cb6c992
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_master_minion.py
@@ -0,0 +1,50 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = [
+        "/default-config",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config with master and minion set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Default config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_minion.py
new file mode 100644
index 00000000000..db4d10b1c0c
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_default_minion.py
@@ -0,0 +1,46 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = ["/default-config", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_old_location(install):
+    assert os.path.exists(rf"{pytest.OLD_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config with only minion set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "#master: salt\n",
+        "# Default config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.OLD_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move.py
new file mode 100644
index 00000000000..cfa48e0b716
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move.py
@@ -0,0 +1,37 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = ["/move-config"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the old existing config in the new location, unchanged
+    expected = pytest.OLD_CONTENT
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom.py
new file mode 100644
index 00000000000..b711f78eace
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom.py
@@ -0,0 +1,40 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = ["/custom-config=custom_conf", "/move-config"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config in the new location, unchanged
+    with open(rf"{pytest.SCRIPT_DIR}\custom_conf") as f:
+        expected = f.readlines()
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_master.py
new file mode 100644
index 00000000000..cd2410d311d
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_master.py
@@ -0,0 +1,48 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = ["/custom-config=custom_conf", "/move-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config in the new location with only master set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: custom_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_master_minion.py
new file mode 100644
index 00000000000..32deccfe4cd
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_master_minion.py
@@ -0,0 +1,53 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = [
+        "/custom-config=custom_conf",
+        "/move-config",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config in the new location with master and minion set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_minion.py
new file mode 100644
index 00000000000..6ff619ed9a5
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_custom_minion.py
@@ -0,0 +1,48 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    # Create a custom config
+    pytest.helpers.custom_config()
+    args = ["/custom-config=custom_conf", "/move-config", "/minion-name=cli_minion"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently the binaries are not moved, even when /install-dir is passed
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the custom config in the new location with only minion set
+    expected = [
+        "# Custom config from test suite line 1/6\n",
+        "master: custom_master\n",
+        "# Custom config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Custom config from test suite line 3/6\n",
+        "# Custom config from test suite line 4/6\n",
+        "# Custom config from test suite line 5/6\n",
+        "# Custom config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default.py
new file mode 100644
index 00000000000..5a64d7e4b28
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default.py
@@ -0,0 +1,38 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = ["/move-config", "/default-config"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if they pass install-dir
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config in the new location, unchanged
+    with open(rf"{pytest.SCRIPT_DIR}\_files\minion") as f:
+        expected = f.readlines()
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_master.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_master.py
new file mode 100644
index 00000000000..bd37b2565fe
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_master.py
@@ -0,0 +1,46 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = ["/default-config", "/move-config", "/master=cli_master"]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if they pass install-dir
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config in the new location with only master set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Default config from test suite line 2/6\n",
+        "#id:\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_master_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_master_minion.py
new file mode 100644
index 00000000000..bc8413aea16
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_master_minion.py
@@ -0,0 +1,51 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = [
+        "/default-config",
+        "/move-config",
+        "/master=cli_master",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if they pass install-dir
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config in the new location with master and minion set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "master: cli_master\n",
+        "# Default config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_minion.py b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_minion.py
new file mode 100644
index 00000000000..0d90d00f0df
--- /dev/null
+++ b/pkg/windows/nsis/tests/manual_tests/test_manual_old_install_move_default_minion.py
@@ -0,0 +1,50 @@
+import os
+
+import pytest
+
+
+@pytest.fixture(scope="module")
+def install():
+    pytest.helpers.clean_env()
+    # Create old install
+    pytest.helpers.old_install()
+    args = [
+        "/default-config",
+        "/move-config",
+        "/minion-name=cli_minion",
+    ]
+    pytest.helpers.install_salt(args)
+    yield args
+    pytest.helpers.clean_env()
+
+
+def test_binaries_present_old_location(install):
+    # This will show the contents of the directory on failure
+    dir_contents = os.listdir(rf"{pytest.OLD_DIR}\bin")
+    # Apparently we don't move the binaries even if they pass install-dir
+    # TODO: Decide if this is expected behavior
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\ssm.exe")
+    assert os.path.exists(rf"{pytest.OLD_DIR}\bin\python.exe")
+
+
+def test_config_present_new_location(install):
+    assert os.path.exists(rf"{pytest.DATA_DIR}\conf\minion")
+
+
+def test_config_correct(install):
+    # The config file should be the default config in the new location with only minion set
+    expected = [
+        "# Default config from test suite line 1/6\n",
+        "#master: salt\n",
+        "# Default config from test suite line 2/6\n",
+        "id: cli_minion\n",
+        "# Default config from test suite line 3/6\n",
+        "# Default config from test suite line 4/6\n",
+        "# Default config from test suite line 5/6\n",
+        "# Default config from test suite line 6/6\n",
+    ]
+
+    with open(rf"{pytest.DATA_DIR}\conf\minion") as f:
+        result = f.readlines()
+
+    assert result == expected
diff --git a/pkg/windows/nsis/tests/pytest.ini b/pkg/windows/nsis/tests/pytest.ini
new file mode 100644
index 00000000000..eea2c180278
--- /dev/null
+++ b/pkg/windows/nsis/tests/pytest.ini
@@ -0,0 +1 @@
+[pytest]
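+# an empty [pytest] section is enough to pin pytest's rootdir to this directory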
diff --git a/pkg/windows/nsis/tests/quick_setup.ps1 b/pkg/windows/nsis/tests/quick_setup.ps1
new file mode 100644
index 00000000000..5dd752b9661
--- /dev/null
+++ b/pkg/windows/nsis/tests/quick_setup.ps1
@@ -0,0 +1,154 @@
+<#
+.SYNOPSIS
+Script that sets up the environment for testing
+
+.DESCRIPTION
+This script creates the directory structure and files needed to build a mock
+Salt installer for testing
+
+.EXAMPLE
+quick_setup.ps1
+#>
+param(
+    [Parameter(Mandatory=$false)]
+    [Alias("c")]
+# Don't prettify the output of Write-Result
+    [Switch] $CICD
+)
+
+#-------------------------------------------------------------------------------
+# Script Preferences
+#-------------------------------------------------------------------------------
+
+$ProgressPreference = "SilentlyContinue"
+$ErrorActionPreference = "Stop"
+
+#-------------------------------------------------------------------------------
+# Script Functions
+#-------------------------------------------------------------------------------
+
+function Write-Result($result, $ForegroundColor="Green") {
+    if ( $CICD ) {
+        Write-Host $result -ForegroundColor $ForegroundColor
+    } else {
+        $position = 80 - $result.Length - [System.Console]::CursorLeft
+        Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    }
+}
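+# Worked example of the alignment math above: after "Creating foo: " the cursor
+# is at column 14, so "Success" (7 chars) is left-padded by 80 - 7 - 14 = 59
+# spaces and ends flush at column 80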
+
+#-------------------------------------------------------------------------------
+# Script Variables
+#-------------------------------------------------------------------------------
+
+$PROJECT_DIR   = $(git rev-parse --show-toplevel)
+$SCRIPT_DIR    = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
+$WINDOWS_DIR   = "$PROJECT_DIR\pkg\windows"
+$NSIS_DIR      = "$WINDOWS_DIR\nsis"
+$BUILDENV_DIR  = "$WINDOWS_DIR\buildenv"
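+# "ProgramFiles(x86)" contains parentheses, so it needs the ${env:...} syntax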
+$NSIS_BIN      = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
+$SALT_DEP_URL  = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64"
+
+#-------------------------------------------------------------------------------
+# Script Start
+#-------------------------------------------------------------------------------
+
+Write-Host $("=" * 80)
+Write-Host "Build Test Environment for NSIS Tests" -ForegroundColor Cyan
+Write-Host $("-" * 80)
+
+#-------------------------------------------------------------------------------
+# Setup Directories
+#-------------------------------------------------------------------------------
+
+$directories = "$BUILDENV_DIR",
+               "$BUILDENV_DIR\configs"
+$directories | ForEach-Object {
+    if ( ! (Test-Path -Path "$_") ) {
+        Write-Host "Creating $_`: " -NoNewline
+        New-Item -Path $_ -ItemType Directory | Out-Null
+        if ( Test-Path -Path "$_" ) {
+            Write-Result "Success"
+        } else {
+            Write-Result "Failed" -ForegroundColor Red
+            exit 1
+        }
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Create binaries
+#-------------------------------------------------------------------------------
+
+$binary_files = @("python.exe")
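+# python.exe is only a placeholder; the file contains the literal text "binary"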
+$binary_files | ForEach-Object {
+    Write-Host "Creating $_`: " -NoNewline
+    Set-Content -Path "$BUILDENV_DIR\$_" -Value "binary"
+    if ( Test-Path -Path "$BUILDENV_DIR\$_" ) {
+        Write-Result "Success"
+    } else {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    }
+}
+
+# Make sure ssm.exe is present. This is needed for VMtools
+if ( ! (Test-Path -Path "$BUILDENV_DIR\ssm.exe") ) {
+    Write-Host "Copying SSM to Build Env: " -NoNewline
+    Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILDENV_DIR\ssm.exe"
+    if ( Test-Path -Path "$BUILDENV_DIR\ssm.exe" ) {
+        Write-Result "Success" -ForegroundColor Green
+    } else {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    }
+}
+
+#-------------------------------------------------------------------------------
+# Copy Configs
+#-------------------------------------------------------------------------------
+
+Write-Host "Copy testing minion config: " -NoNewline
+Copy-Item -Path "$NSIS_DIR\tests\_files\minion" `
+          -Destination "$BUILDENV_DIR\configs\"
+if ( Test-Path -Path "$BUILDENV_DIR\configs\minion" ) {
+    Write-Result "Success"
+} else {
+    Write-Result "Failed" -ForegroundColor Red
+    exit 1
+}
+
+#-------------------------------------------------------------------------------
+# Build mock installer
+#-------------------------------------------------------------------------------
+Write-Host "Building mock installer: " -NoNewline
+Start-Process -FilePath $NSIS_BIN `
+              -ArgumentList "/DSaltVersion=test", `
+                            "/DPythonArchitecture=AMD64", `
+                            "$NSIS_DIR\installer\Salt-Minion-Setup.nsi" `
+              -Wait -WindowStyle Hidden
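+# (-Wait blocks until makensis exits so the Test-Path check below sees the
+# finished installer)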
+$installer = "$NSIS_DIR\installer\Salt-Minion-test-Py3-AMD64-Setup.exe"
+if ( Test-Path -Path "$installer" ) {
+    Write-Result "Success"
+} else {
+    Write-Result "Failed" -ForegroundColor Red
+    Write-Host "$NSIS_BIN /DSaltVersion=test /DPythonArchitecture=AMD64 $NSIS_DIR\installer\Salt-Minion-Setup.nsi"
+    exit 1
+}
+
+Write-Host "Moving mock installer: " -NoNewline
+$test_installer = "$NSIS_DIR\tests\test-setup.exe"
+Move-Item -Path $installer -Destination "$test_installer" -Force
+if ( Test-Path -Path "$test_installer" ) {
+    Write-Result "Success"
+} else {
+    Write-Result "Failed" -ForegroundColor Red
+    exit 1
+}
+
+#-------------------------------------------------------------------------------
+# Script Complete
+#-------------------------------------------------------------------------------
+
+Write-Host $("-" * 80)
+Write-Host "Build Test Environment for NSIS Tests Complete" -ForegroundColor Cyan
+Write-Host $("=" * 80)
diff --git a/pkg/windows/nsis/tests/setup.ps1 b/pkg/windows/nsis/tests/setup.ps1
index 37ca0f74640..2b136f66600 100644
--- a/pkg/windows/nsis/tests/setup.ps1
+++ b/pkg/windows/nsis/tests/setup.ps1
@@ -9,6 +9,12 @@ installer for testing
 .EXAMPLE
 setup.ps1
 #>
+param(
+    [Parameter(Mandatory=$false)]
+    [Alias("c")]
+# Don't prettify the output of Write-Result
+    [Switch] $CICD
+)
 
 #-------------------------------------------------------------------------------
 # Script Preferences
@@ -22,8 +28,12 @@ $ErrorActionPreference = "Stop"
 #-------------------------------------------------------------------------------
 
 function Write-Result($result, $ForegroundColor="Green") {
-    $position = 80 - $result.Length - [System.Console]::CursorLeft
-    Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    if ( $CICD ) {
+        Write-Host $result -ForegroundColor $ForegroundColor
+    } else {
+        $position = 80 - $result.Length - [System.Console]::CursorLeft
+        Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    }
 }
 
 #-------------------------------------------------------------------------------
@@ -37,6 +47,7 @@ $NSIS_DIR      = "$WINDOWS_DIR\nsis"
 $BUILDENV_DIR  = "$WINDOWS_DIR\buildenv"
 $PREREQS_DIR   = "$WINDOWS_DIR\prereqs"
 $NSIS_BIN      = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
+$SALT_DEP_URL  = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64"
 
 #-------------------------------------------------------------------------------
 # Script Start
@@ -71,7 +82,7 @@ $directories | ForEach-Object {
 #-------------------------------------------------------------------------------
 
 $prereq_files = "vcredist_x86_2022.exe",
-                "vcredist_x64_2022.exe",
+                "vcredist_x64_2022.exe"
 $prereq_files | ForEach-Object {
     Write-Host "Creating $_`: " -NoNewline
     Set-Content -Path "$PREREQS_DIR\$_" -Value "binary"
@@ -83,8 +94,8 @@ $prereq_files | ForEach-Object {
     }
 }
 
-$binary_files = "ssm.exe",
-                "python.exe"
+$binary_files = @("python.exe", "ssm.exe")
+
 $binary_files | ForEach-Object {
     Write-Host "Creating $_`: " -NoNewline
     Set-Content -Path "$BUILDENV_DIR\$_" -Value "binary"
@@ -96,11 +107,23 @@ $binary_files | ForEach-Object {
     }
 }
 
+# Make sure ssm.exe is present. This is needed for VMtools
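+# (ssm.exe is normally already created as a stub above, so this download is a
+# fallback)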
+if ( ! (Test-Path -Path "$BUILDENV_DIR\ssm.exe") ) {
+    Write-Host "Copying SSM to Build Env: " -NoNewline
+    Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILDENV_DIR\ssm.exe"
+    if ( Test-Path -Path "$BUILDENV_DIR\ssm.exe" ) {
+        Write-Result "Success" -ForegroundColor Green
+    } else {
+        Write-Result "Failed" -ForegroundColor Red
+        exit 1
+    }
+}
+
 #-------------------------------------------------------------------------------
 # Copy Configs
 #-------------------------------------------------------------------------------
 
-Write-Host "Copy minion config: " -NoNewline
+Write-Host "Copy testing minion config: " -NoNewline
 Copy-Item -Path "$NSIS_DIR\tests\_files\minion" `
           -Destination "$BUILDENV_DIR\configs\"
 if ( Test-Path -Path "$BUILDENV_DIR\configs\minion" ) {
@@ -124,6 +147,7 @@ if ( Test-Path -Path "$installer" ) {
     Write-Result "Success"
 } else {
     Write-Result "Failed" -ForegroundColor Red
+    Write-Host "$NSIS_BIN /DSaltVersion=test /DPythonArchitecture=AMD64 $NSIS_DIR\installer\Salt-Minion-Setup.nsi"
     exit 1
 }
 
@@ -142,7 +166,7 @@ if ( Test-Path -Path "$test_installer" ) {
 #-------------------------------------------------------------------------------
 
 Write-Host "Setting up venv: " -NoNewline
-python.exe -m venv venv
+python.exe -m venv "$SCRIPT_DIR\venv"
 if ( Test-Path -Path "$SCRIPT_DIR\venv" ) {
     Write-Result "Success"
 } else {
@@ -151,7 +175,7 @@ if ( Test-Path -Path "$SCRIPT_DIR\venv" ) {
 }
 
 Write-Host "Activating venv: " -NoNewline
-.\venv\Scripts\activate
+& $SCRIPT_DIR\venv\Scripts\activate.ps1
 if ( "$env:VIRTUAL_ENV" ) {
     Write-Result "Success"
 } else {
diff --git a/pkg/windows/nsis/tests/stress_tests/test_hang.py b/pkg/windows/nsis/tests/stress_tests/test_hang.py
new file mode 100644
index 00000000000..bea2458a362
--- /dev/null
+++ b/pkg/windows/nsis/tests/stress_tests/test_hang.py
@@ -0,0 +1,26 @@
+import os
+
+import pytest
+
+
+@pytest.fixture
+def install():
+    assert pytest.helpers.clean_env()
+    args = ["/S"]
+    pytest.helpers.install_salt(args)
+    yield args
+    assert pytest.helpers.clean_env()
+
+
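+# Run the silent install/uninstall cycle repeatedly to catch intermittent
+# installer hangs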
+@pytest.mark.parametrize("execution_number", range(100))
+def test_repeatedly_install_uninstall(execution_number, install):
+    # Make sure the binaries exists. If they don't, the install failed
+    assert os.path.exists(
+        f"{pytest.INST_DIR}\\python.exe"
+    ), "Installation failed. `python.exe` not found"
+    assert os.path.exists(
+        f"{pytest.INST_DIR}\\ssm.exe"
+    ), "Installation failed. `ssm.exe` not found"
+    assert os.path.exists(
+        f"{pytest.INST_DIR}\\uninst.exe"
+    ), "Installation failed. `uninst.exe` not found"
diff --git a/pkg/windows/nsis/tests/test.ps1 b/pkg/windows/nsis/tests/test.ps1
index 015c8b8c60e..c386a69acd9 100644
--- a/pkg/windows/nsis/tests/test.ps1
+++ b/pkg/windows/nsis/tests/test.ps1
@@ -8,6 +8,16 @@ This script activates the venv and launches pytest
 .EXAMPLE
 test.ps1
 #>
+param(
+    [Parameter(Mandatory=$false)]
+    [Alias("c")]
+# Don't prettify the output of Write-Result
+    [Switch] $CICD=$false,
+
+    [Parameter(Mandatory=$false, ValueFromRemainingArguments=$true)]
+# Tests to run; defaults to .\config_tests\ when not specified
+    [String]$Tests
+)
 
 #-------------------------------------------------------------------------------
 # Script Preferences
@@ -21,10 +31,20 @@ $ErrorActionPreference = "Stop"
 #-------------------------------------------------------------------------------
 
 function Write-Result($result, $ForegroundColor="Green") {
-    $position = 80 - $result.Length - [System.Console]::CursorLeft
-    Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    if ( $CICD ) {
+        Write-Host $result -ForegroundColor $ForegroundColor
+    } else {
+        $position = 80 - $result.Length - [System.Console]::CursorLeft
+        Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
+    }
 }
 
+#-------------------------------------------------------------------------------
+# Script Variables
+#-------------------------------------------------------------------------------
+
+$SCRIPT_DIR    = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
+
 #-------------------------------------------------------------------------------
 # Script Start
 #-------------------------------------------------------------------------------
@@ -36,9 +56,13 @@ Write-Host $("-" * 80)
 #-------------------------------------------------------------------------------
 # Activating venv
 #-------------------------------------------------------------------------------
+if ( !(Test-Path -Path "$SCRIPT_DIR\venv\Scripts\activate.ps1") ) {
+    Write-Host "Could not find virtual environment"
+    Write-Host "You must run setup.cmd before running this script"
+}
 
 Write-Host "Activating venv: " -NoNewline
-.\venv\Scripts\activate
+& $SCRIPT_DIR\venv\Scripts\activate.ps1
 if ( "$env:VIRTUAL_ENV" ) {
     Write-Result "Success"
 } else {
@@ -46,9 +70,27 @@ if ( "$env:VIRTUAL_ENV" ) {
     exit 1
 }
 
+Write-Host "Setting working directory: " -NoNewline
+Set-Location -Path $SCRIPT_DIR
+if ( $(Get-Location).Path -eq $SCRIPT_DIR ) {
+    Write-Result "Success"
+} else {
+    Write-Result "Failed" -ForegroundColor Red
+    exit 1
+}
+
+Write-Host $("-" * 80)
+Write-Host ""
 Write-Host "Running pytest..."
 Write-Host ""
-pytest -vvv -- .\config_tests\
+
+if ($Tests) {
+    $pytest_args = -join $Tests
+} else {
+    $pytest_args = ".\config_tests\"
+}
+
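+# -rPx adds summary output for passed-with-output (P) and xfailed (x) tests;
+# --showlocals prints local variables on failure, which is what surfaces the
+# dir_contents locals assigned in the install tests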
+pytest -vvv -rPx --showlocals -- $pytest_args
 
 #-------------------------------------------------------------------------------
 # Script Complete
diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1
index a583b17c985..c35b6e65546 100644
--- a/pkg/windows/prep_salt.ps1
+++ b/pkg/windows/prep_salt.ps1
@@ -66,20 +66,20 @@ $PREREQ_DIR     = "$SCRIPT_DIR\prereqs"
 $SCRIPTS_DIR    = "$BUILD_DIR\Scripts"
 $BUILD_CONF_DIR = "$BUILD_DIR\configs"
 $SITE_PKGS_DIR  = "$BUILD_DIR\Lib\site-packages"
-$BUILD_SALT_DIR = "$SITE_PKGS_DIR\salt"
 $PYTHON_BIN     = "$SCRIPTS_DIR\python.exe"
 $PY_VERSION     = [Version]((Get-Command $PYTHON_BIN).FileVersionInfo.ProductVersion)
 $PY_VERSION     = "$($PY_VERSION.Major).$($PY_VERSION.Minor)"
-$ARCH           = $(. $PYTHON_BIN -c "import platform; print(platform.architecture()[0])")
+$PY_ARCH        = $(. $PYTHON_BIN -c "import platform; print(platform.architecture()[0])")
+$DEPS_URL       = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main"
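+# Windows dependencies (ssm, vcredist) are hosted in the salt-windows-deps repo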
 
-if ( $ARCH -eq "64bit" ) {
-    $ARCH         = "AMD64"
-    $ARCH_X       = "x64"
-    $SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64"
+if ( $PY_ARCH -eq "64bit" ) {
+    $ARCH         = "x64"
+    $SSM_URL      = "$DEPS_URL/ssm/64/ssm-2.24-103-gdee49fc.exe"
+    $VCREDIST_URL = "$DEPS_URL/vcredist"
 } else {
     $ARCH         = "x86"
-    $ARCH_X       = "x86"
-    $SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/32"
+    $SSM_URL      = "$DEPS_URL/ssm/32/ssm-2.24-103-gdee49fc.exe"
+    $VCREDIST_URL = "$DEPS_URL/vcredist"
 }
 
 #-------------------------------------------------------------------------------
@@ -155,14 +155,15 @@ if ( $PKG ) {
 
 # Make sure ssm.exe is present. This is needed for VMtools
 if ( ! (Test-Path -Path "$BUILD_DIR\ssm.exe") ) {
-    Write-Host "Copying SSM to Root: " -NoNewline
-    Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILD_DIR\ssm.exe"
+    Write-Host "Copying SSM $ARCH to Root: " -NoNewline
+    Invoke-WebRequest -Uri "$SSM_URL" -OutFile "$BUILD_DIR\ssm.exe"
     if ( Test-Path -Path "$BUILD_DIR\ssm.exe" ) {
         Write-Result "Success" -ForegroundColor Green
     } else {
         Write-Result "Failed" -ForegroundColor Red
         exit 1
     }
+    Write-Host $SSM_URL
 }
 
 # Copy the multiminion scripts to the Build directory
@@ -185,9 +186,9 @@ $scripts | ForEach-Object {
 
 # Copy VCRedist 2022 to the prereqs directory
 New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null
-Write-Host "Copying VCRedist 2022 $ARCH_X to prereqs: " -NoNewline
-$file = "vcredist_$ARCH_X`_2022.exe"
-Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file"
+Write-Host "Copying VCRedist 2022 $ARCH to prereqs: " -NoNewline
+$file = "vcredist_$ARCH`_2022.exe"
+Invoke-WebRequest -Uri "$VCREDIST_URL\$file" -OutFile "$PREREQ_DIR\$file"
 if ( Test-Path -Path "$PREREQ_DIR\$file" ) {
     Write-Result "Success" -ForegroundColor Green
 } else {
@@ -267,70 +268,20 @@ $directories | ForEach-Object {
     }
 }
 
-Write-Host "Removing __pycache__ directories: " -NoNewline
-$found = Get-ChildItem -Path "$BUILD_DIR" -Filter "__pycache__" -Recurse
-$found | ForEach-Object {
-    Remove-Item -Path "$($_.FullName)" -Recurse -Force
-    if ( Test-Path -Path "$($_.FullName)" ) {
-        Write-Result "Failed" -ForegroundColor Red
-        Write-Host "Failed to remove: $($_.FullName)"
-        exit 1
-    }
-}
-Write-Result "Success" -ForegroundColor Green
-
-# If we try to remove *.pyc with the same Get-ChildItem that we used to remove
-# __pycache__ directories, it won't be able to find them because they are no
-# longer present
-# This probably won't find any *.pyc files, but just in case
-$remove = "*.pyc",
+Write-Host "Removing unneeded files (.pyc, .chm): " -NoNewline
+$remove = "__pycache__",
+          "*.pyc",
           "*.chm"
 $remove | ForEach-Object {
-    Write-Host "Removing unneeded $_ files: " -NoNewline
-    $found = Get-ChildItem -Path "$BUILD_DIR" -Filter $_ -Recurse
+    $found = Get-ChildItem -Path "$BUILD_DIR" -Include $_ -Recurse
     $found | ForEach-Object {
         Remove-Item -Path "$($_.FullName)" -Recurse -Force
         if ( Test-Path -Path "$($_.FullName)" ) {
             Write-Result "Failed" -ForegroundColor Red
             Write-Host "Failed to remove: $($_.FullName)"
             exit 1
         }
     }
-    Write-Result "Success" -ForegroundColor Green
-}
-
-#-------------------------------------------------------------------------------
-# Set timestamps on Files
-#-------------------------------------------------------------------------------
-
-# We're doing this again in this script because we use python above to get the
-# build architecture and that adds back some __pycache__ and *.pyc files
-Write-Host "Getting commit time stamp: " -NoNewline
-[DateTime]$origin = "1970-01-01 00:00:00"
-$hash_time = $(git show -s --format=%at)
-$time_stamp = $origin.AddSeconds($hash_time)
-if ( $hash_time ) {
-    Write-Result "Success" -ForegroundColor Green
-} else {
-    Write-Result "Failed" -ForegroundColor Red
-    exit 1
-}
-
-Write-Host "Setting time stamp on all salt files: " -NoNewline
-$found = Get-ChildItem -Path $BUILD_DIR -Recurse
-$found | ForEach-Object {
-    $_.CreationTime = $time_stamp
-    $_.LastAccessTime = $time_stamp
-    $_.LastWriteTime = $time_stamp
-}
-Write-Result "Success" -ForegroundColor Green
-
-Write-Host "Setting time stamp on all prereq files: " -NoNewline
-$found = Get-ChildItem -Path $PREREQ_DIR -Recurse
-$found | ForEach-Object {
-    $_.CreationTime = $time_stamp
-    $_.LastAccessTime = $time_stamp
-    $_.LastWriteTime = $time_stamp
 }
 Write-Result "Success" -ForegroundColor Green
 
diff --git a/pkg/windows/sign.bat b/pkg/windows/sign.bat
deleted file mode 100644
index 64809a5a160..00000000000
--- a/pkg/windows/sign.bat
+++ /dev/null
@@ -1,219 +0,0 @@
-:: ############################################################################
-::
-::              FILE: sign.bat
-::
-::       DESCRIPTION: Signing and Hashing script for Salt builds on Windows.
-::                    Requires an official Code Signing Certificate and drivers
-::                    installed to sign the files. Generates hashes in MD5 and
-::                    SHA256 in a file of the same name with a `.md5` or
-::                    `.sha256` extension.
-::
-::              NOTE: This script is used internally by SaltStack to sign and
-::                    hash Windows Installer builds and uses resources not
-::                    available to the community, such as SaltStack's Code
-::                    Signing Certificate. It is placed here for version
-::                    control.
-::
-::         COPYRIGHT: (c) 2012-2018 by the SaltStack Team
-::
-::           LICENSE: Apache 2.0
-::      ORGANIZATION: SaltStack, Inc (saltstack.com)
-::           CREATED: 2017
-::
-:: ############################################################################
-::
-:: USAGE: The script must be located in a directory that has the installer
-::        files in a sub-folder named with the major version, ie: `2018.3`.
-::        Insert the key fob that contains the code signing certificate. Run
-::        the script passing the full version: `.\sign.bat 2018.3.1`.
-::
-::        The script will sign the installers and generate the corresponding
-::        hash files. These can then be uploaded to the salt repo.
-::
-::        The files must be in the following format:
-::        <Series>\Salt-Minion-<Version>-<Python Version>-<System Architecture>-Setup.exe
-::        So, for a Salt Minion installer for 2018.3.1 on AMD64 for Python 3,
-::        the file would be placed in a subdirectory named `2018.3` and would
-::        be named: `Salt-Minion-2018.3.1-Py3-AMD64-Setup.exe`. This is how
-::        the NSI script creates the file anyway.
-::
-::        You can test the timestamp server with the following command:
-::        curl -i timestamp.digicert.com/timestamp/health
-::
-:: REQUIREMENTS: This script requires the ``signtool.exe`` binary that is a part
-::               of the Windows SDK. To install just the ``signtool.exe``:
-::
-::      OPTION 1:
-::          1. Download the Windows 10 SDK ISO:
-::             https://developer.microsoft.com/en-us/windows/downloads/windows-sdk/
-::          2. Mount the ISO and browse to the ``Installers`` directory
-::          3. Run the ``Windows SDK Signing Tools-x86_en-us.msi``
-::
-::      OPTION 2:
-::          1. Download the Visual Studio Build Tools:
-::             https://aka.ms/vs/15/release/vs_buildtools.exe
-::          2. Run the following command:
-::             vs_buildtools.exe --quiet --add Microsoft.Component.ClickOnce.MSBuild
-::
-:: ############################################################################
-@ echo off
-if [%1]==[] (
-    echo You must pass a version
-    goto quit
-) else (
-    set "Version=%~1"
-)
-
-set Series=%Version:~0,4%
-
-if not exist .\%Series%\ (
-    echo - Series %Series% is not valid
-    exit 1
-)
-
-:: Sign Installer Files
-echo ===========================================================================
-echo Signing...
-echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-signtool.exe sign /a /t http://timestamp.digicert.com ^
-                     "%Series%\Salt-Minion-%Version%-AMD64-Setup.exe" ^
-                     "%Series%\Salt-Minion-%Version%-x86-Setup.exe" ^
-                     "%Series%\Salt-%Version%-AMD64-Setup.exe" ^
-                     "%Series%\Salt-%Version%-x86-Setup.exe" ^
-                     "%Series%\Salt-%Version%-Py2-AMD64-Setup.exe" ^
-                     "%Series%\Salt-%Version%-Py2-x86-Setup.exe" ^
-                     "%Series%\Salt-%Version%-Py3-AMD64-Setup.exe" ^
-                     "%Series%\Salt-%Version%-Py3-x86-Setup.exe" ^
-                     "%Series%\Salt-Minion-%Version%-Py2-AMD64-Setup.exe" ^
-                     "%Series%\Salt-Minion-%Version%-Py2-x86-Setup.exe" ^
-                     "%Series%\Salt-Minion-%Version%-Py3-AMD64-Setup.exe" ^
-                     "%Series%\Salt-Minion-%Version%-Py3-x86-Setup.exe" ^
-                     "%Series%\Salt-Minion-%Version%-Py3-AMD64.msi" ^
-                     "%Series%\Salt-Minion-%Version%-Py3-x86.msi"
-
-echo %ERRORLEVEL%
-echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-echo Signing Complete
-echo ===========================================================================
-
-:: Create Hash files
-echo ===========================================================================
-echo Creating Hashes...
-echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-set "file_name=Salt-Minion-%Version%-AMD64-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-x86-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-%Version%-AMD64-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-%Version%-x86-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-%Version%-Py2-AMD64-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-%Version%-Py2-x86-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-%Version%-Py3-AMD64-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-%Version%-Py3-x86-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-Py2-AMD64-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-Py2-x86-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-Py3-AMD64-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-Py3-x86-Setup.exe"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-Py3-AMD64.msi"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-set "file_name=Salt-Minion-%Version%-Py3-x86.msi"
-set "file=.\%Series%\%file_name%"
-if exist "%file%" (
-    echo - %file_name%
-    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
-    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
-)
-
-echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-echo Hashing Complete
-echo ===========================================================================
-
-:quit
diff --git a/requirements/base.txt b/requirements/base.txt
index 7b22a9faff5..34ea8c2f882 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -6,12 +6,14 @@ msgpack>=1.0.0 ; python_version < '3.13'
 msgpack>=1.1.0 ; python_version >= '3.13'
 PyYAML
 MarkupSafe
-# pin to a version available on all supported python versions so salt-ssh can run on older targets
 networkx
-requests>=2.31.0 ; python_version < '3.8'
-requests>=2.32.0 ; python_version >= '3.8'
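+# requests, certifi, and psutil (below) are pinned per interpreter: pre-3.10
+# Pythons keep the last compatible releases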
+requests<2.32.0 ; python_version < '3.10'
+requests>=2.32.3 ; python_version >= '3.10'
+certifi==2023.07.22 ; python_version < '3.10'
+certifi>=2024.7.4 ; python_version >= '3.10'
 distro>=1.0.1
-psutil>=5.0.0
+psutil<6.0.0 ; python_version <= '3.9'
+psutil>=5.0.0 ; python_version >= '3.10'
 packaging>=21.3
 looseversion
 tornado>=6.3.3
@@ -38,7 +40,7 @@ rpm-vercmp; sys_platform == 'linux'
 # From old windows.txt requirements file
 gitpython>=3.1.37; sys_platform == 'win32'
 lxml>=4.6.3; sys_platform == 'win32'
-pymssql>=2.2.1; sys_platform == 'win32'
+pymssql>=2.2.11; sys_platform == 'win32'
 pymysql>=1.0.2; sys_platform == 'win32'
 pythonnet>=3.0.4; sys_platform == 'win32' and python_version < '3.13'
 pywin32>=305; sys_platform == 'win32'
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 82f744d426e..d98982090ee 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -1,5 +1,5 @@
 setuptools >= 65.6.3,< 69.0
 setuptools-scm < 8.0.0
-pip >= 23.3,< 24.0
-# Restrict to a version that works on all supported python versions so salt-ssh can run on older targets
 networkx >= 3.0,< 3.2
+pip >= 23.3,< 24.0 ; python_version < '3.12'
+pip >= 24.0 ; python_version >= '3.12'
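+# Python 3.12+ moves to the pip 24 series; older interpreters keep the tested 23.x range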
diff --git a/requirements/pytest.txt b/requirements/pytest.txt
index ce8b9569125..a8a26ea3892 100644
--- a/requirements/pytest.txt
+++ b/requirements/pytest.txt
@@ -13,3 +13,7 @@ flaky
 more-itertools
 pyfakefs
 trustme
+pytest-skip-markers >= 1.5.2 ; python_version >= '3.8'
+pytest-skip-markers <= 1.5.1 ; python_version < '3.8'
+pytest-shell-utilities <= 1.9.0 ; python_version <= '3.9'
+pytest-shell-utilities >= 1.9.7 ; python_version >= '3.10'
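+# Pins split by interpreter so older Pythons keep the last compatible releases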
diff --git a/requirements/static/ci/common.in b/requirements/static/ci/common.in
index 7a9629da29a..9aad00959b8 100644
--- a/requirements/static/ci/common.in
+++ b/requirements/static/ci/common.in
@@ -8,7 +8,6 @@ apache-libcloud>=1.5.0; sys_platform != 'win32'
 boto3>=1.21.46
 boto>=2.46.0
 cassandra-driver>=2.0
-certifi>=2022.12.07
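+# certifi is now pinned in requirements/base.txt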
 cffi>=1.14.6
 cherrypy>=17.4.1
 clustershell
@@ -35,7 +34,6 @@ pynacl>=1.5.0
 pyinotify>=0.9.6; sys_platform != 'win32' and sys_platform != 'darwin' and platform_system != "openbsd"
 python-etcd>0.4.2
 pyvmomi
-requests
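+# requests is now pinned in requirements/base.txt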
 rfc3987
 sqlparse>=0.4.4
 strict_rfc3339>=0.7
diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt
index ec1ca35992e..adb20457f19 100644
--- a/requirements/static/ci/py3.10/cloud.txt
+++ b/requirements/static/ci/py3.10/cloud.txt
@@ -8,7 +8,7 @@ apache-libcloud==3.7.0 ; sys_platform != "win32"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   -r requirements/static/ci/cloud.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   requests
@@ -51,7 +51,7 @@ pywinrm==0.4.3
     # via -r requirements/static/ci/cloud.in
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   apache-libcloud
diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt
index a9cd7fc6f11..9cda5d1874a 100644
--- a/requirements/static/ci/py3.10/darwin.txt
+++ b/requirements/static/ci/py3.10/darwin.txt
@@ -60,10 +60,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/darwin.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -315,7 +315,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.10/darwin.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/darwin.txt
     #   -r requirements/base.txt
@@ -378,10 +378,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -442,11 +445,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.10/darwin.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/darwin.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -539,7 +541,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt
index 3eef9acc60d..367514ad7f7 100644
--- a/requirements/static/ci/py3.10/docs.txt
+++ b/requirements/static/ci/py3.10/docs.txt
@@ -16,7 +16,7 @@ autocommand==2.2.2
     #   jaraco.text
 babel==2.12.1
     # via sphinx
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   requests
@@ -119,7 +119,7 @@ pyyaml==6.0.1
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   myst-docutils
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   sphinx
diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt
index 219aa5b23ce..5c94b77ec11 100644
--- a/requirements/static/ci/py3.10/freebsd.txt
+++ b/requirements/static/ci/py3.10/freebsd.txt
@@ -60,10 +60,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/freebsd.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -319,7 +319,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.10/freebsd.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/freebsd.txt
     #   -r requirements/base.txt
@@ -382,10 +382,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -446,11 +449,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.10/freebsd.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/freebsd.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -544,7 +546,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt
index 6be00f6c5bc..24381099e68 100644
--- a/requirements/static/ci/py3.10/lint.txt
+++ b/requirements/static/ci/py3.10/lint.txt
@@ -6,7 +6,7 @@
 #
 astroid==3.1.0
     # via pylint
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   requests
@@ -36,7 +36,7 @@ pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   docker
diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt
index d2e9043e0d4..e366c4fffdd 100644
--- a/requirements/static/ci/py3.10/linux.txt
+++ b/requirements/static/ci/py3.10/linux.txt
@@ -70,10 +70,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -345,7 +345,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
     #   -r requirements/base.txt
@@ -416,10 +416,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -490,11 +493,10 @@ redis-py-cluster==2.1.3
     # via -r requirements/static/ci/linux.in
 redis==3.5.3
     # via redis-py-cluster
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -608,7 +610,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt
index 63a79562d8b..7a88b60c379 100644
--- a/requirements/static/ci/py3.10/windows.txt
+++ b/requirements/static/ci/py3.10/windows.txt
@@ -52,10 +52,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/windows.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.16.0
@@ -277,7 +277,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.10/windows.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/windows.txt
     #   -r requirements/base.txt
@@ -310,7 +310,7 @@ pyfakefs==5.3.1
     # via -r requirements/pytest.txt
 pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/windows.txt
     #   -r requirements/base.txt
@@ -340,10 +340,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -412,11 +415,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   pytest-salt-factories
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/windows.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   docker
     #   etcd3-py
     #   kubernetes
@@ -491,7 +493,6 @@ typing-extensions==4.8.0
     #   inflect
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt
index e77bd18ab3b..df1399d94c9 100644
--- a/requirements/static/ci/py3.11/cloud.txt
+++ b/requirements/static/ci/py3.11/cloud.txt
@@ -8,7 +8,7 @@ apache-libcloud==3.7.0 ; sys_platform != "win32"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   -r requirements/static/ci/cloud.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   requests
@@ -51,7 +51,7 @@ pywinrm==0.4.3
     # via -r requirements/static/ci/cloud.in
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   apache-libcloud
diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt
index 6d39b8a3b57..6d59ab753c3 100644
--- a/requirements/static/ci/py3.11/darwin.txt
+++ b/requirements/static/ci/py3.11/darwin.txt
@@ -55,10 +55,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/darwin.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -308,7 +308,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.11/darwin.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/darwin.txt
     #   -r requirements/base.txt
@@ -371,10 +371,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -435,11 +438,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.11/darwin.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/darwin.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -530,7 +532,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt
index ffa33dbcf7e..bca5bf5fac9 100644
--- a/requirements/static/ci/py3.11/docs.txt
+++ b/requirements/static/ci/py3.11/docs.txt
@@ -16,7 +16,7 @@ autocommand==2.2.2
     #   jaraco.text
 babel==2.12.1
     # via sphinx
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   requests
@@ -119,7 +119,7 @@ pyyaml==6.0.1
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   myst-docutils
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   sphinx
diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt
index 9a91bde26ab..3d710e0e329 100644
--- a/requirements/static/ci/py3.11/freebsd.txt
+++ b/requirements/static/ci/py3.11/freebsd.txt
@@ -55,10 +55,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/freebsd.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -312,7 +312,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.11/freebsd.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/freebsd.txt
     #   -r requirements/base.txt
@@ -375,10 +375,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -439,11 +442,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.11/freebsd.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/freebsd.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -536,7 +538,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt
index 150c35069c6..7de66dc7e8d 100644
--- a/requirements/static/ci/py3.11/lint.txt
+++ b/requirements/static/ci/py3.11/lint.txt
@@ -6,7 +6,7 @@
 #
 astroid==3.1.0
     # via pylint
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   requests
@@ -36,7 +36,7 @@ pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   docker
diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt
index 65b629dd273..805b8beb593 100644
--- a/requirements/static/ci/py3.11/linux.txt
+++ b/requirements/static/ci/py3.11/linux.txt
@@ -65,10 +65,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -336,7 +336,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
     #   -r requirements/base.txt
@@ -407,10 +407,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -481,11 +484,10 @@ redis-py-cluster==2.1.3
     # via -r requirements/static/ci/linux.in
 redis==3.5.3
     # via redis-py-cluster
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -598,7 +600,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt
index 9cdb986ffac..af9d97d3b72 100644
--- a/requirements/static/ci/py3.11/windows.txt
+++ b/requirements/static/ci/py3.11/windows.txt
@@ -47,10 +47,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.16.0
@@ -270,7 +270,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   -r requirements/base.txt
@@ -303,7 +303,7 @@ pyfakefs==5.3.1
     # via -r requirements/pytest.txt
 pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   -r requirements/base.txt
@@ -333,10 +333,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -405,11 +408,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   pytest-salt-factories
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   docker
     #   etcd3-py
     #   kubernetes
@@ -482,7 +484,6 @@ typing-extensions==4.8.0
     #   inflect
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt
index 8c1366a806a..7a3b91cde0b 100644
--- a/requirements/static/ci/py3.12/cloud.txt
+++ b/requirements/static/ci/py3.12/cloud.txt
@@ -73,11 +73,11 @@ cassandra-driver==3.28.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -433,7 +433,7 @@ portend==3.1.0
     #   cherrypy
 profitbricks==4.1.3
     # via -r requirements/static/ci/cloud.in
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
@@ -531,13 +531,15 @@ pytest-salt-factories==1.0.1
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -617,12 +619,11 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   pytest-salt-factories
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -764,7 +765,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt
index bdbd44275dd..59a6d022564 100644
--- a/requirements/static/ci/py3.12/darwin.txt
+++ b/requirements/static/ci/py3.12/darwin.txt
@@ -55,10 +55,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/darwin.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -308,7 +308,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.12/darwin.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/darwin.txt
     #   -r requirements/base.txt
@@ -371,10 +371,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -435,11 +438,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.12/darwin.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/darwin.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -530,7 +532,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt
index bb075cd1150..50af05dac4e 100644
--- a/requirements/static/ci/py3.12/docs.txt
+++ b/requirements/static/ci/py3.12/docs.txt
@@ -28,9 +28,10 @@ autocommand==2.2.2
     #   jaraco.text
 babel==2.12.1
     # via sphinx
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
+    #   -r requirements/base.txt
     #   requests
 cffi==1.16.0
     # via
@@ -172,7 +173,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/base.txt
@@ -223,7 +224,7 @@ pyzmq==25.1.2 ; python_version < "3.13"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/base.txt
diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt
index f3c899fba49..b9a10a8f045 100644
--- a/requirements/static/ci/py3.12/freebsd.txt
+++ b/requirements/static/ci/py3.12/freebsd.txt
@@ -55,10 +55,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/freebsd.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -312,7 +312,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.12/freebsd.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/freebsd.txt
     #   -r requirements/base.txt
@@ -375,10 +375,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -439,11 +442,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.12/freebsd.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/freebsd.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -536,7 +538,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt
index 5567b88443a..769708088f7 100644
--- a/requirements/static/ci/py3.12/lint.txt
+++ b/requirements/static/ci/py3.12/lint.txt
@@ -88,11 +88,11 @@ cassandra-driver==3.28.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/static/ci/common.in
-certifi==2023.7.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -459,7 +459,7 @@ portend==3.1.0
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
@@ -610,12 +610,11 @@ redis==3.5.3
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   redis-py-cluster
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt
index 02903d3bc9d..35ab4dd5118 100644
--- a/requirements/static/ci/py3.12/linux.txt
+++ b/requirements/static/ci/py3.12/linux.txt
@@ -65,10 +65,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -336,7 +336,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -r requirements/base.txt
@@ -407,10 +407,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -481,11 +484,10 @@ redis-py-cluster==2.1.3
     # via -r requirements/static/ci/linux.in
 redis==3.5.3
     # via redis-py-cluster
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
@@ -598,7 +600,6 @@ typing-extensions==4.8.0
     #   napalm
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt
index bdecba514e0..5e9b05bc70d 100644
--- a/requirements/static/ci/py3.12/windows.txt
+++ b/requirements/static/ci/py3.12/windows.txt
@@ -47,10 +47,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2024.7.4 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/windows.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.16.0
@@ -270,7 +270,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.12/windows.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/windows.txt
     #   -r requirements/base.txt
@@ -303,7 +303,7 @@ pyfakefs==5.3.1
     # via -r requirements/pytest.txt
 pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/windows.txt
     #   -r requirements/base.txt
@@ -333,10 +333,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -405,11 +408,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   pytest-salt-factories
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/windows.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   docker
     #   etcd3-py
     #   kubernetes
@@ -482,7 +484,6 @@ typing-extensions==4.8.0
     #   inflect
     #   pydantic
     #   pydantic-core
-    #   pytest-shell-utilities
     #   pytest-system-statistics
 urllib3==1.26.18
     # via
diff --git a/requirements/static/ci/py3.13/cloud.txt b/requirements/static/ci/py3.13/cloud.txt
index a8678e4a6a1..0e97486b1ae 100644
--- a/requirements/static/ci/py3.13/cloud.txt
+++ b/requirements/static/ci/py3.13/cloud.txt
@@ -70,11 +70,11 @@ cassandra-driver==3.29.2
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/static/ci/common.in
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.17.1
@@ -423,7 +423,7 @@ propcache==0.2.0
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
@@ -507,13 +507,15 @@ pytest-salt-factories==1.0.4
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/pytest.txt
-pytest-shell-utilities==1.9.7
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
-pytest-skip-markers==1.5.2
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -597,12 +599,11 @@ referencing==0.35.1
     #   jsonschema-specifications
 requests-ntlm==1.3.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.13/darwin.txt b/requirements/static/ci/py3.13/darwin.txt
index 2d3b8696aa3..53a44f61f05 100644
--- a/requirements/static/ci/py3.13/darwin.txt
+++ b/requirements/static/ci/py3.13/darwin.txt
@@ -53,10 +53,10 @@ cachetools==5.5.0
     # via google-auth
 cassandra-driver==3.29.2
     # via -r requirements/static/ci/common.in
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/darwin.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.17.1
@@ -300,7 +300,7 @@ propcache==0.2.0
     # via
     #   -c requirements/static/ci/../pkg/py3.13/darwin.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/darwin.txt
     #   -r requirements/base.txt
@@ -353,10 +353,13 @@ pytest-httpserver==1.1.0
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.4
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.9.7
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.2
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -420,11 +423,10 @@ referencing==0.35.1
     # via
     #   jsonschema
     #   jsonschema-specifications
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/darwin.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.13/docs.txt b/requirements/static/ci/py3.13/docs.txt
index 5bc7a5ed4dd..42fb95ac55b 100644
--- a/requirements/static/ci/py3.13/docs.txt
+++ b/requirements/static/ci/py3.13/docs.txt
@@ -28,9 +28,10 @@ autocommand==2.2.2
     #   jaraco.text
 babel==2.16.0
     # via sphinx
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
+    #   -r requirements/base.txt
     #   requests
 cffi==1.17.1
     # via
@@ -172,7 +173,7 @@ propcache==0.2.0
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/base.txt
@@ -215,7 +216,7 @@ pyzmq==26.2.0 ; python_version >= "3.13"
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/base.txt
diff --git a/requirements/static/ci/py3.13/freebsd.txt b/requirements/static/ci/py3.13/freebsd.txt
index af1d573f520..49d74ac24e7 100644
--- a/requirements/static/ci/py3.13/freebsd.txt
+++ b/requirements/static/ci/py3.13/freebsd.txt
@@ -53,10 +53,10 @@ cachetools==5.5.0
     # via google-auth
 cassandra-driver==3.29.2
     # via -r requirements/static/ci/common.in
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/freebsd.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.17.1
@@ -304,7 +304,7 @@ propcache==0.2.0
     # via
     #   -c requirements/static/ci/../pkg/py3.13/freebsd.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/freebsd.txt
     #   -r requirements/base.txt
@@ -357,10 +357,13 @@ pytest-httpserver==1.1.0
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.4
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.9.7
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.2
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -424,11 +427,10 @@ referencing==0.35.1
     # via
     #   jsonschema
     #   jsonschema-specifications
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/freebsd.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.13/lint.txt b/requirements/static/ci/py3.13/lint.txt
index d4265890af1..bb96c7c4146 100644
--- a/requirements/static/ci/py3.13/lint.txt
+++ b/requirements/static/ci/py3.13/lint.txt
@@ -84,11 +84,11 @@ cassandra-driver==3.29.2
     # via
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/static/ci/common.in
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -448,7 +448,7 @@ propcache==0.2.0
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
@@ -588,12 +588,11 @@ referencing==0.35.1
     #   -c requirements/static/ci/py3.13/linux.txt
     #   jsonschema
     #   jsonschema-specifications
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -c requirements/static/ci/py3.13/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.13/linux.txt b/requirements/static/ci/py3.13/linux.txt
index 5a49ea404db..ea6ee1f0d47 100644
--- a/requirements/static/ci/py3.13/linux.txt
+++ b/requirements/static/ci/py3.13/linux.txt
@@ -63,10 +63,10 @@ cachetools==5.5.0
     # via google-auth
 cassandra-driver==3.29.2
     # via -r requirements/static/ci/common.in
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -328,7 +328,7 @@ propcache==0.2.0
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -r requirements/base.txt
@@ -389,10 +389,13 @@ pytest-httpserver==1.1.0
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.4
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.9.7
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.2
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -465,11 +468,10 @@ referencing==0.35.1
     # via
     #   jsonschema
     #   jsonschema-specifications
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.13/windows.txt b/requirements/static/ci/py3.13/windows.txt
index 24e1d72f045..29be8123ce0 100644
--- a/requirements/static/ci/py3.13/windows.txt
+++ b/requirements/static/ci/py3.13/windows.txt
@@ -49,10 +49,10 @@ cachetools==5.5.0
     # via google-auth
 cassandra-driver==3.29.2
     # via -r requirements/static/ci/common.in
-certifi==2024.8.30
+certifi==2024.8.30 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/windows.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.17.1
@@ -269,7 +269,7 @@ propcache==0.2.0
     # via
     #   -c requirements/static/ci/../pkg/py3.13/windows.txt
     #   yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/windows.txt
     #   -r requirements/base.txt
@@ -322,10 +322,13 @@ pytest-httpserver==1.1.0
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.4
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.9.7
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.2
+pytest-shell-utilities==1.9.7 ; python_version >= "3.10"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -391,11 +394,10 @@ referencing==0.35.1
     #   jsonschema-specifications
 requests-ntlm==1.3.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.13/windows.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   docker
     #   etcd3-py
     #   kubernetes
diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt
index 6f3761c14dc..1ba90f47267 100644
--- a/requirements/static/ci/py3.8/cloud.txt
+++ b/requirements/static/ci/py3.8/cloud.txt
@@ -8,7 +8,7 @@ apache-libcloud==3.7.0 ; sys_platform != "win32"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   -r requirements/static/ci/cloud.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   requests
@@ -51,7 +51,7 @@ pywinrm==0.4.3
     # via -r requirements/static/ci/cloud.in
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   apache-libcloud
diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt
index ff2a746c40a..8af149f9643 100644
--- a/requirements/static/ci/py3.8/docs.txt
+++ b/requirements/static/ci/py3.8/docs.txt
@@ -16,7 +16,7 @@ autocommand==2.2.2
     #   jaraco.text
 babel==2.12.1
     # via sphinx
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   requests
@@ -128,7 +128,7 @@ pyyaml==6.0.1
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   myst-docutils
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   sphinx
diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt
index ad8082d2422..113efca853f 100644
--- a/requirements/static/ci/py3.8/freebsd.txt
+++ b/requirements/static/ci/py3.8/freebsd.txt
@@ -60,10 +60,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/freebsd.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -323,7 +323,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.8/freebsd.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/freebsd.txt
     #   -r requirements/base.txt
@@ -386,10 +386,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -450,11 +453,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.8/freebsd.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/freebsd.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt
index 085b21ecd1d..30777ac3a8e 100644
--- a/requirements/static/ci/py3.8/lint.txt
+++ b/requirements/static/ci/py3.8/lint.txt
@@ -6,7 +6,7 @@
 #
 astroid==3.1.0
     # via pylint
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   requests
@@ -36,7 +36,7 @@ pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   docker
diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt
index a1043a5bdbc..bd2b63c8110 100644
--- a/requirements/static/ci/py3.8/linux.txt
+++ b/requirements/static/ci/py3.8/linux.txt
@@ -66,10 +66,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -342,7 +342,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
     #   -r requirements/base.txt
@@ -413,10 +413,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -486,11 +489,10 @@ redis-py-cluster==2.1.3
     # via -r requirements/static/ci/linux.in
 redis==3.5.3
     # via redis-py-cluster
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt
index 292fb22a252..7b4bd42d432 100644
--- a/requirements/static/ci/py3.8/windows.txt
+++ b/requirements/static/ci/py3.8/windows.txt
@@ -52,10 +52,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.16.0
@@ -281,7 +281,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   -r requirements/base.txt
@@ -314,7 +314,7 @@ pyfakefs==5.3.1
     # via -r requirements/pytest.txt
 pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   -r requirements/base.txt
@@ -344,10 +344,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -417,11 +420,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   pytest-salt-factories
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   docker
     #   etcd3-py
     #   kubernetes
diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt
index cb37f9685e8..96810387a60 100644
--- a/requirements/static/ci/py3.9/cloud.txt
+++ b/requirements/static/ci/py3.9/cloud.txt
@@ -8,7 +8,7 @@ apache-libcloud==3.7.0 ; sys_platform != "win32"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   -r requirements/static/ci/cloud.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   requests
@@ -51,7 +51,7 @@ pywinrm==0.4.3
     # via -r requirements/static/ci/cloud.in
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   apache-libcloud
diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt
index 9afb42cb9bb..2201b7af000 100644
--- a/requirements/static/ci/py3.9/darwin.txt
+++ b/requirements/static/ci/py3.9/darwin.txt
@@ -60,10 +60,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/darwin.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -315,7 +315,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.9/darwin.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/darwin.txt
     #   -r requirements/base.txt
@@ -378,10 +378,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -442,11 +445,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.9/darwin.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/darwin.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt
index 320123d7592..bc711d0b691 100644
--- a/requirements/static/ci/py3.9/docs.txt
+++ b/requirements/static/ci/py3.9/docs.txt
@@ -16,7 +16,7 @@ autocommand==2.2.2
     #   jaraco.text
 babel==2.12.1
     # via sphinx
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   requests
@@ -123,7 +123,7 @@ pyyaml==6.0.1
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   myst-docutils
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   sphinx
diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt
index db2e079f27b..e924c90b5c9 100644
--- a/requirements/static/ci/py3.9/freebsd.txt
+++ b/requirements/static/ci/py3.9/freebsd.txt
@@ -60,10 +60,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/freebsd.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 certvalidator==0.11.1
@@ -319,7 +319,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.9/freebsd.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/freebsd.txt
     #   -r requirements/base.txt
@@ -382,10 +382,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -446,11 +449,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   -c requirements/static/ci/../pkg/py3.9/freebsd.txt
     #   -r requirements/zeromq.txt
     #   pytest-salt-factories
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/freebsd.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt
index 92d500abf59..d41301640b9 100644
--- a/requirements/static/ci/py3.9/lint.txt
+++ b/requirements/static/ci/py3.9/lint.txt
@@ -6,7 +6,7 @@
 #
 astroid==3.1.0
     # via pylint
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   requests
@@ -36,7 +36,7 @@ pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/py3.9/linux.txt
     #   docker
diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt
index 6748a941e12..2ccf064cf5b 100644
--- a/requirements/static/ci/py3.9/linux.txt
+++ b/requirements/static/ci/py3.9/linux.txt
@@ -66,10 +66,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/linux.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   httpcore
     #   httpx
     #   kubernetes
@@ -338,7 +338,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.9/linux.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/linux.txt
     #   -r requirements/base.txt
@@ -409,10 +409,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -482,11 +485,10 @@ redis-py-cluster==2.1.3
     # via -r requirements/static/ci/linux.in
 redis==3.5.3
     # via redis-py-cluster
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/linux.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   apache-libcloud
     #   docker
     #   etcd3-py
diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt
index 8e96ec1bfc4..4c5734fbb6d 100644
--- a/requirements/static/ci/py3.9/windows.txt
+++ b/requirements/static/ci/py3.9/windows.txt
@@ -52,10 +52,10 @@ cachetools==5.3.1
     # via google-auth
 cassandra-driver==3.28.0
     # via -r requirements/static/ci/common.in
-certifi==2023.07.22
+certifi==2023.07.22 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
-    #   -r requirements/static/ci/common.in
+    #   -r requirements/base.txt
     #   kubernetes
     #   requests
 cffi==1.16.0
@@ -277,7 +277,7 @@ portend==3.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   -r requirements/base.txt
@@ -310,7 +310,7 @@ pyfakefs==5.3.1
     # via -r requirements/pytest.txt
 pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   -r requirements/base.txt
@@ -340,10 +340,13 @@ pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
 pytest-salt-factories==1.0.1
     # via -r requirements/pytest.txt
-pytest-shell-utilities==1.8.0
-    # via pytest-salt-factories
-pytest-skip-markers==1.5.0
+pytest-shell-utilities==1.8.0 ; python_version <= "3.9"
     # via
+    #   -r requirements/pytest.txt
+    #   pytest-salt-factories
+pytest-skip-markers==1.5.2 ; python_version >= "3.8"
+    # via
+    #   -r requirements/pytest.txt
     #   pytest-salt-factories
     #   pytest-shell-utilities
     #   pytest-system-statistics
@@ -413,11 +416,10 @@ pyzmq==25.1.2 ; python_version < "3.13"
     #   pytest-salt-factories
 requests-ntlm==1.2.0
     # via pywinrm
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   -r requirements/base.txt
-    #   -r requirements/static/ci/common.in
     #   docker
     #   etcd3-py
     #   kubernetes
diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt
index 7c4dcfe2198..3697f08a03f 100644
--- a/requirements/static/pkg/py3.10/darwin.txt
+++ b/requirements/static/pkg/py3.10/darwin.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -91,7 +93,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -117,7 +119,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt
index fd0fca7f9f0..0b5c2360986 100644
--- a/requirements/static/pkg/py3.10/freebsd.txt
+++ b/requirements/static/pkg/py3.10/freebsd.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -91,7 +93,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -117,7 +119,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt
index 7a88bbda70e..c5b6f00d650 100644
--- a/requirements/static/pkg/py3.10/linux.txt
+++ b/requirements/static/pkg/py3.10/linux.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -91,7 +93,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -117,7 +119,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 rpm-vercmp==0.1.2 ; sys_platform == "linux"
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt
index ed93ef0cfbe..346451a41c6 100644
--- a/requirements/static/pkg/py3.10/windows.txt
+++ b/requirements/static/pkg/py3.10/windows.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via
     #   clr-loader
@@ -99,7 +101,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -109,7 +111,7 @@ pydantic-core==2.16.3
     # via pydantic
 pydantic==2.6.4
     # via inflect
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via -r requirements/base.txt
 pymysql==1.1.0 ; sys_platform == "win32"
     # via -r requirements/base.txt
@@ -131,7 +133,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt
index 2ebbd04afdc..9b22a278bc3 100644
--- a/requirements/static/pkg/py3.11/darwin.txt
+++ b/requirements/static/pkg/py3.11/darwin.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -89,7 +91,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -115,7 +117,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt
index 59b95b8a4b0..6bb214365a3 100644
--- a/requirements/static/pkg/py3.11/freebsd.txt
+++ b/requirements/static/pkg/py3.11/freebsd.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -89,7 +91,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -115,7 +117,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt
index de72285e974..1fca05825c7 100644
--- a/requirements/static/pkg/py3.11/linux.txt
+++ b/requirements/static/pkg/py3.11/linux.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -89,7 +91,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -115,7 +117,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 rpm-vercmp==0.1.2 ; sys_platform == "linux"
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt
index 27019dbb346..f411797b1d6 100644
--- a/requirements/static/pkg/py3.11/windows.txt
+++ b/requirements/static/pkg/py3.11/windows.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via
     #   clr-loader
@@ -97,7 +99,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -107,7 +109,7 @@ pydantic-core==2.16.3
     # via pydantic
 pydantic==2.6.4
     # via inflect
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via -r requirements/base.txt
 pymysql==1.1.0 ; sys_platform == "win32"
     # via -r requirements/base.txt
@@ -129,7 +131,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt
index efa00427d51..4c94e994465 100644
--- a/requirements/static/pkg/py3.12/darwin.txt
+++ b/requirements/static/pkg/py3.12/darwin.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -89,7 +91,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -115,7 +117,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt
index 5a8393cf447..bd5dfd4c260 100644
--- a/requirements/static/pkg/py3.12/freebsd.txt
+++ b/requirements/static/pkg/py3.12/freebsd.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -89,7 +91,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -115,7 +117,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt
index 940d48de8ae..88501b7da7a 100644
--- a/requirements/static/pkg/py3.12/linux.txt
+++ b/requirements/static/pkg/py3.12/linux.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -89,7 +91,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -115,7 +117,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 rpm-vercmp==0.1.2 ; sys_platform == "linux"
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt
index a5e08f34c09..eab825e335d 100644
--- a/requirements/static/pkg/py3.12/windows.txt
+++ b/requirements/static/pkg/py3.12/windows.txt
@@ -14,8 +14,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2024.7.4 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via
     #   clr-loader
@@ -97,7 +99,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -107,7 +109,7 @@ pydantic-core==2.16.3
     # via pydantic
 pydantic==2.6.4
     # via inflect
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via -r requirements/base.txt
 pymysql==1.1.0 ; sys_platform == "win32"
     # via -r requirements/base.txt
@@ -129,7 +131,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.13/darwin.txt b/requirements/static/pkg/py3.13/darwin.txt
index d5828bfe09f..b89fef4abd9 100644
--- a/requirements/static/pkg/py3.13/darwin.txt
+++ b/requirements/static/pkg/py3.13/darwin.txt
@@ -14,8 +14,10 @@ attrs==24.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2024.8.30
-    # via requests
+certifi==2024.8.30 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.17.1
     # via cryptography
 charset-normalizer==3.4.0
@@ -89,7 +91,7 @@ portend==3.2.0
     # via cherrypy
 propcache==0.2.0
     # via yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.22
     # via cffi
@@ -110,7 +112,7 @@ pyyaml==6.0.2
     # via -r requirements/base.txt
 pyzmq==26.2.0 ; python_version >= "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.3
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.13/freebsd.txt b/requirements/static/pkg/py3.13/freebsd.txt
index 474cc2c93e3..83ae6013b9d 100644
--- a/requirements/static/pkg/py3.13/freebsd.txt
+++ b/requirements/static/pkg/py3.13/freebsd.txt
@@ -14,8 +14,10 @@ attrs==24.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2024.8.30
-    # via requests
+certifi==2024.8.30 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.17.1
     # via cryptography
 charset-normalizer==3.4.0
@@ -89,7 +91,7 @@ portend==3.2.0
     # via cherrypy
 propcache==0.2.0
     # via yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.22
     # via cffi
@@ -110,7 +112,7 @@ pyyaml==6.0.2
     # via -r requirements/base.txt
 pyzmq==26.2.0 ; python_version >= "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.3
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.13/linux.txt b/requirements/static/pkg/py3.13/linux.txt
index ab17698096a..7796f9d5124 100644
--- a/requirements/static/pkg/py3.13/linux.txt
+++ b/requirements/static/pkg/py3.13/linux.txt
@@ -14,8 +14,10 @@ attrs==24.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2024.8.30
-    # via requests
+certifi==2024.8.30 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.17.1
     # via cryptography
 charset-normalizer==3.4.0
@@ -89,7 +91,7 @@ portend==3.2.0
     # via cherrypy
 propcache==0.2.0
     # via yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.22
     # via cffi
@@ -110,7 +112,7 @@ pyyaml==6.0.2
     # via -r requirements/base.txt
 pyzmq==26.2.0 ; python_version >= "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 rpm-vercmp==0.1.2 ; sys_platform == "linux"
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.13/windows.txt b/requirements/static/pkg/py3.13/windows.txt
index 1364cb8d70e..63c9b1c2c42 100644
--- a/requirements/static/pkg/py3.13/windows.txt
+++ b/requirements/static/pkg/py3.13/windows.txt
@@ -14,8 +14,10 @@ attrs==24.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2024.8.30
-    # via requests
+certifi==2024.8.30 ; python_version >= "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.17.1
     # via cryptography
 charset-normalizer==3.4.0
@@ -93,7 +95,7 @@ portend==3.2.0
     # via cherrypy
 propcache==0.2.0
     # via yarl
-psutil==6.1.0
+psutil==6.1.0 ; python_version >= "3.10"
     # via -r requirements/base.txt
 pycparser==2.22
     # via cffi
@@ -119,7 +121,7 @@ pyyaml==6.0.2
     # via -r requirements/base.txt
 pyzmq==26.2.0 ; python_version >= "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.32.3 ; python_version >= "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.3
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt
index 13b0737e64d..c22a0c22ddb 100644
--- a/requirements/static/pkg/py3.8/freebsd.txt
+++ b/requirements/static/pkg/py3.8/freebsd.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -93,7 +95,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -119,7 +121,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt
index 47bbcd59cb5..aa0f9f609eb 100644
--- a/requirements/static/pkg/py3.8/linux.txt
+++ b/requirements/static/pkg/py3.8/linux.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -93,7 +95,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -119,7 +121,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 rpm-vercmp==0.1.2 ; sys_platform == "linux"
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt
index f48adf7ec72..21d02802785 100644
--- a/requirements/static/pkg/py3.8/windows.txt
+++ b/requirements/static/pkg/py3.8/windows.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via
     #   clr-loader
@@ -101,7 +103,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -111,7 +113,7 @@ pydantic-core==2.16.3
     # via pydantic
 pydantic==2.6.4
     # via inflect
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via -r requirements/base.txt
 pymysql==1.1.0 ; sys_platform == "win32"
     # via -r requirements/base.txt
@@ -134,7 +136,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt
index acbbd66e8e7..30d2446ab00 100644
--- a/requirements/static/pkg/py3.9/darwin.txt
+++ b/requirements/static/pkg/py3.9/darwin.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -91,7 +93,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -117,7 +119,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt
index 159e7d19a12..958833b7124 100644
--- a/requirements/static/pkg/py3.9/freebsd.txt
+++ b/requirements/static/pkg/py3.9/freebsd.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -91,7 +93,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -117,7 +119,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt
index fe5ca6067e2..9826a16cc05 100644
--- a/requirements/static/pkg/py3.9/linux.txt
+++ b/requirements/static/pkg/py3.9/linux.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via cryptography
 charset-normalizer==3.2.0
@@ -91,7 +93,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -117,7 +119,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 rpm-vercmp==0.1.2 ; sys_platform == "linux"
     # via -r requirements/base.txt
diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt
index 814a10dfc3b..fe5e28e20f2 100644
--- a/requirements/static/pkg/py3.9/windows.txt
+++ b/requirements/static/pkg/py3.9/windows.txt
@@ -16,8 +16,10 @@ attrs==23.2.0
     # via aiohttp
 autocommand==2.2.2
     # via jaraco.text
-certifi==2023.07.22
-    # via requests
+certifi==2023.07.22 ; python_version < "3.10"
+    # via
+    #   -r requirements/base.txt
+    #   requests
 cffi==1.16.0
     # via
     #   clr-loader
@@ -99,7 +101,7 @@ packaging==23.1
     # via -r requirements/base.txt
 portend==3.1.0
     # via cherrypy
-psutil==5.9.6
+psutil==5.9.6 ; python_version <= "3.9"
     # via -r requirements/base.txt
 pycparser==2.21
     # via cffi
@@ -109,7 +111,7 @@ pydantic-core==2.16.3
     # via pydantic
 pydantic==2.6.4
     # via inflect
-pymssql==2.2.7 ; sys_platform == "win32"
+pymssql==2.3.1 ; sys_platform == "win32"
     # via -r requirements/base.txt
 pymysql==1.1.0 ; sys_platform == "win32"
     # via -r requirements/base.txt
@@ -132,7 +134,7 @@ pyyaml==6.0.1
     # via -r requirements/base.txt
 pyzmq==25.1.2 ; python_version < "3.13"
     # via -r requirements/zeromq.txt
-requests==2.32.3 ; python_version >= "3.8"
+requests==2.31.0 ; python_version < "3.10"
     # via -r requirements/base.txt
 setproctitle==1.3.2
     # via -r requirements/base.txt
diff --git a/salt/_logging/handlers.py b/salt/_logging/handlers.py
index d3fa03522eb..735a4dfacbf 100644
--- a/salt/_logging/handlers.py
+++ b/salt/_logging/handlers.py
@@ -65,8 +65,11 @@ class DeferredStreamHandler(StreamHandler):
                 super().handle(record)
             finally:
                 self.__emitting = False
-        # This will raise a ValueError if the file handle has been closed.
-        super().flush()
+        # Seeing an exception from calling flush on a closed file in the test
+        # suite. Handling this condition for now, but this seems to be
+        # indicative of an unclean teardown at some point.
+        if not self.stream.closed:
+            super().flush()
 
     def sync_with_handlers(self, handlers=()):
         """
diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py
index 1d5927319d8..321ccf794b5 100644
--- a/salt/_logging/impl.py
+++ b/salt/_logging/impl.py
@@ -158,6 +158,9 @@ LOGGING_LOGGER_CLASS = logging.getLoggerClass()
 
 
 class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
+
+    ONCECACHE = set()
+
     def __new__(cls, *args):
         """
         We override `__new__` in our logging logger class in order to provide
@@ -234,7 +237,13 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
         stack_info=False,
         stacklevel=1,
         exc_info_on_loglevel=None,
+        once=False,
     ):
+        if once:
+            if str(args) in self.ONCECACHE:
+                return
+            self.ONCECACHE.add(str(args))
+
         if extra is None:
             extra = {}
 
@@ -270,6 +279,7 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
                         exc_info_on_loglevel
                     )
                 )
+        # XXX: extra is never None
         if extra is None:
             extra = {"exc_info_on_loglevel": exc_info_on_loglevel}
         else:
diff --git a/salt/channel/client.py b/salt/channel/client.py
index 3483e7237e4..b03f4659a17 100644
--- a/salt/channel/client.py
+++ b/salt/channel/client.py
@@ -368,7 +368,7 @@ class AsyncPubChannel:
 
     async_methods = [
         "connect",
-        "_decode_messages",
+        "_decode_payload",
     ]
     close_methods = [
         "close",
diff --git a/salt/channel/server.py b/salt/channel/server.py
index 5fcca7fd59f..2b9f4f76d4b 100644
--- a/salt/channel/server.py
+++ b/salt/channel/server.py
@@ -95,7 +95,7 @@ class ReqServerChannel:
         # other things needed for _auth
         # Create the event manager
         self.event = salt.utils.event.get_master_event(
-            self.opts, self.opts["sock_dir"], listen=False
+            self.opts, self.opts["sock_dir"], listen=False, io_loop=io_loop
         )
         self.auto_key = salt.daemons.masterapi.AutoKey(self.opts)
         # only create a con_cache-client if the con_cache is active
diff --git a/salt/cli/batch.py b/salt/cli/batch.py
index 2e43b0ee22b..3a648c02b86 100644
--- a/salt/cli/batch.py
+++ b/salt/cli/batch.py
@@ -83,7 +83,14 @@ class Batch:
                         )
                     break
                 if m is not None:
-                    fret.add(m)
+                    if "failed" in ret[m] and ret[m]["failed"] is True:
+                        log.debug(
+                            "minion '%s' failed test.ping - will be returned as a down minion",
+                            m,
+                        )
+                    else:
+                        fret.add(m)
+
         return (list(fret), ping_gen, nret.difference(fret))
 
     def get_bnum(self):
@@ -292,11 +299,12 @@ class Batch:
                         # We already know some minions didn't respond to the ping, so inform
                         # inform user attempt to run a job failed
                         salt.utils.stringutils.print_cli(
-                            "Minion '%s' failed to respond to job sent", minion
+                            f"Minion '{minion}' failed to respond to job sent"
                         )
 
                     if self.opts.get("failhard"):
                         failhard = True
+                    ret[minion] = data
                 else:
                     # If we are executing multiple modules with the same cmd,
                     # We use the highest retcode.
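
With this change, a minion whose ``test.ping`` return carries
``failed: True`` is logged and treated as down instead of being added to the
responding set. A condensed sketch of that partitioning, using hypothetical
return data:

.. code-block:: python

    # Sketch: split ping returns into up and down minions, treating a
    # ``failed: True`` payload as down (the sample data is made up).
    ret = {
        "web1": True,
        "web2": {"failed": True},
        "db1": True,
    }

    up = {
        minion
        for minion, data in ret.items()
        if not (isinstance(data, dict) and data.get("failed") is True)
    }
    down = set(ret) - up
    print(sorted(up), sorted(down))  # ['db1', 'web1'] ['web2']
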
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 140b8bc58cd..6bbac005858 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -540,7 +540,7 @@ class SSH(MultiprocessingStateMixin):
             )
             deploy = input("[Y/n] ")
             if deploy.startswith(("n", "N")):
-                return ret
+                return ret, None
             target["passwd"] = getpass.getpass(
                 "Password for {}@{}: ".format(target["user"], host)
             )
diff --git a/salt/client/ssh/wrapper/__init__.py b/salt/client/ssh/wrapper/__init__.py
index 77c7c050893..c4235c239f7 100644
--- a/salt/client/ssh/wrapper/__init__.py
+++ b/salt/client/ssh/wrapper/__init__.py
@@ -5,8 +5,8 @@ to be easily rewritten to execute in a way that makes them do the same tasks
 as ZeroMQ salt, but via ssh.
 """
 
-import copy
 import logging
+from collections.abc import MutableMapping
 
 import salt.client.ssh
 import salt.loader
@@ -87,10 +87,9 @@ class SSHCommandExecutionError(SSHException, CommandExecutionError):
         return super().to_ret()
 
     def __str__(self):
-        ret = self.to_ret()
-        if not isinstance(ret, str):
-            ret = self.stderr or self.stdout
-        return f"{self._error}: {ret}"
+        if self.retcode > 0:
+            return f"{self._error}: {self.stderr or self.stdout}"
+        return self._error
 
 
 class SSHPermissionDeniedError(SSHException):
@@ -118,7 +117,62 @@ class SSHMalformedReturnError(SSHException):
     _error = "Return dict was malformed"
 
 
-class FunctionWrapper:
+class LoadedMod:
+    """
+    This class is used as a proxy to a loaded wrapper module
+    or the module part of a call to the target when
+    a non-recommended syntax is used for loader access
+    (like ``salt.grains.get`` or ``salt["grains"].get``).
+    """
+
+    __slots__ = ("mod", "wrapper")
+
+    def __init__(self, mod, wrapper):
+        self.mod = mod
+        self.wrapper = wrapper
+
+    def __getattr__(self, name):
+        """
+        Return the requested function.
+        """
+        try:
+            return self.wrapper[f"{self.mod}.{name}"]
+        except KeyError:
+            # This shouldn't happen since we wrap unknown calls to the target
+            raise AttributeError(
+                f"No attribute by the name of {name} was found on {self.mod}"
+            )
+
+    def __setitem__(self, name, value):
+        """
+        Set aliases for functions
+        """
+        self.wrapper[f"{self.mod}.{name}"] = value
+
+    def __delitem__(self, name):
+        """
+        Remove aliases for functions
+        """
+        del self.wrapper[f"{self.mod}.{name}"]
+
+    def __repr__(self):
+        try:
+            # Determine if we're representing a wrapper module or
+            # an unknown execution module on the target.
+            # We need to use the attribute since __getitem__ does not
+            # allow module-level access.
+            getattr(
+                self.wrapper.wfuncs, self.mod
+            )  # pylint: disable=pointless-statement
+            prefix = self.wrapper.wfuncs.loaded_base_name + "."
+            name = self.__class__.__name__
+        except AttributeError:
+            prefix = ""
+            name = "SSHTargetMod"
+        return f"<{name} module='{prefix}{self.mod}'>"
+
+
+class FunctionWrapper(MutableMapping):
     """
     Create an object that acts like the salt function dict and makes function
     calls remotely via the SSH shell system
@@ -132,14 +186,12 @@ class FunctionWrapper:
         wfuncs=None,
         mods=None,
         fsclient=None,
-        cmd_prefix=None,
         aliases=None,
         minion_opts=None,
         **kwargs,
     ):
         super().__init__()
-        self.cmd_prefix = cmd_prefix
-        self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
+        self.wfuncs = wfuncs if wfuncs is not None else {}
         self.opts = opts
         self.mods = mods if isinstance(mods, dict) else {}
         self.kwargs = {"id_": id_, "host": host}
@@ -157,7 +209,7 @@ class FunctionWrapper:
         __getitem__ keys 0 and up until IndexError
         """
         try:
-            self[key]  # pylint: disable=W0104
+            self[key]  # pylint: disable=pointless-statement
             return True
         except KeyError:
             return False
@@ -166,32 +218,12 @@ class FunctionWrapper:
         """
         Return the function call to simulate the salt local lookup system
         """
-        if "." not in cmd and not self.cmd_prefix:
+        if "." not in cmd:
             # Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
-            # containing only 'cmd' module calls, in that case. Create a new
-            # FunctionWrapper which contains the prefix 'cmd' (again, for the
-            # salt.cmd.run example)
-            kwargs = copy.deepcopy(self.kwargs)
-            id_ = kwargs.pop("id_")
-            host = kwargs.pop("host")
-            return FunctionWrapper(
-                self.opts,
-                id_,
-                host,
-                wfuncs=self.wfuncs,
-                mods=self.mods,
-                fsclient=self.fsclient,
-                cmd_prefix=cmd,
-                aliases=self.aliases,
-                minion_opts=self.minion_opts,
-                **kwargs,
-            )
-
-        if self.cmd_prefix:
-            # We're in an inner FunctionWrapper as created by the code block
-            # above. Reconstruct the original cmd in the form 'cmd.run' and
-            # then evaluate as normal
-            cmd = f"{self.cmd_prefix}.{cmd}"
+            # containing only 'cmd' module calls
+            # We don't know which modules are available on the target, so just
+            # return the module namespace without any checks.
+            return LoadedMod(cmd, self)
 
         if cmd in self.wfuncs:
             return self.wfuncs[cmd]
@@ -231,18 +263,12 @@ class FunctionWrapper:
         """
         Set aliases for functions
         """
-        if "." not in cmd and not self.cmd_prefix:
+        if "." not in cmd:
             # Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
             # containing only 'cmd' module calls, in that case. We don't
             # support assigning directly to prefixes in this way
             raise KeyError(f"Cannot assign to module key {cmd} in the FunctionWrapper")
 
-        if self.cmd_prefix:
-            # We're in an inner FunctionWrapper as created by the first code
-            # block in __getitem__. Reconstruct the original cmd in the form
-            # 'cmd.run' and then evaluate as normal
-            cmd = f"{self.cmd_prefix}.{cmd}"
-
         if cmd in self.wfuncs:
             self.wfuncs[cmd] = value
 
@@ -251,14 +277,46 @@ class FunctionWrapper:
         # later in __getitem__
         self.aliases[cmd] = value
 
-    def get(self, cmd, default):
+    def __delitem__(self, cmd):
         """
-        Mirrors behavior of dict.get
+        Remove aliases for functions
         """
-        if cmd in self:
-            return self[cmd]
-        else:
-            return default
+        if "." not in cmd:
+            # Form of salt.cmd.run in Jinja
+            raise KeyError(f"Cannot delete module key {cmd} in the FunctionWrapper")
+
+        if cmd in self.wfuncs:
+            del self.wfuncs[cmd]
+
+        del self.aliases[cmd]
+
+    def __len__(self):
+        """
+        Return the count of wrapper modules and aliases.
+        We don't know which modules will be available on the target.
+        """
+        return len(self.wfuncs) + len(self.aliases)
+
+    def __iter__(self):
+        """
+        Iterate through wrapper modules and aliases.
+        We don't know which modules will be available on the target.
+        """
+        yield from self.wfuncs
+        yield from self.aliases
+
+    def __getattr__(self, mod_or_func):
+        """
+        Ensure the behavior is similar to the usual LazyLoader regarding
+        attribute access.
+        """
+        if mod_or_func.startswith("__") and mod_or_func.endswith("__"):
+            # Don't pretend dunders are set.
+            raise AttributeError(mod_or_func)
+        try:
+            return self.__getitem__(mod_or_func)
+        except KeyError:
+            raise AttributeError(mod_or_func)
 
 
 def parse_ret(stdout, stderr, retcode, result_only=False):
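
Turning ``FunctionWrapper`` into a ``MutableMapping`` and routing bare
module names through ``LoadedMod`` lets ``salt['grains.get']`` and the
discouraged ``salt['grains'].get`` resolve to the same callable without
creating nested wrapper instances. A toy sketch of the pattern (not Salt's
loader; all names here are illustrative):

.. code-block:: python

    from collections.abc import MutableMapping


    # Proxy standing in for LoadedMod: looks function names back up on
    # the parent mapping under the "<mod>.<func>" key.
    class ModProxy:
        def __init__(self, mod, wrapper):
            self.mod = mod
            self.wrapper = wrapper

        def __getattr__(self, name):
            return self.wrapper[f"{self.mod}.{name}"]


    # Mapping standing in for FunctionWrapper: a plain dict of
    # "<mod>.<func>" callables, plus module-level proxy access.
    class Wrapper(MutableMapping):
        def __init__(self, funcs):
            self.funcs = funcs

        def __getitem__(self, cmd):
            if "." not in cmd:
                # Bare module name: hand back a proxy instead of nesting.
                return ModProxy(cmd, self)
            return self.funcs[cmd]

        def __setitem__(self, cmd, value):
            self.funcs[cmd] = value

        def __delitem__(self, cmd):
            del self.funcs[cmd]

        def __iter__(self):
            return iter(self.funcs)

        def __len__(self):
            return len(self.funcs)


    salt = Wrapper({"grains.get": lambda key: f"value of {key}"})
    assert salt["grains.get"]("os") == salt["grains"].get("os")
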
diff --git a/salt/client/ssh/wrapper/slsutil.py b/salt/client/ssh/wrapper/slsutil.py
index 586d09ad2d6..a94a6b16df1 100644
--- a/salt/client/ssh/wrapper/slsutil.py
+++ b/salt/client/ssh/wrapper/slsutil.py
@@ -121,7 +121,7 @@ def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs):
     .. code-block:: jinja
 
         #!jinja|yaml
-        {% set apache = salt.grains.filter_by({
+        {% set apache = salt['grains.filter_by']({
             ...normal jinja map file here...
         }, merge=salt.pillar.get('apache:lookup')) %}
         {{ apache | yaml() }}
@@ -141,7 +141,7 @@ def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs):
 
     .. code-block:: jinja
 
-        {% set apache = salt.slsutil.renderer('map.sls') %}
+        {% set apache = salt['slsutil.renderer']('map.sls') %}
 
     CLI Example:
 
@@ -211,7 +211,7 @@ def serialize(serializer, obj, **mod_kwargs):
 
     .. code-block:: jinja
 
-        {% set json_string = salt.slsutil.serialize('json',
+        {% set json_string = salt['slsutil.serialize']('json',
             {'foo': 'Foo!'}) %}
     """
     kwargs = salt.utils.args.clean_kwargs(**mod_kwargs)
@@ -235,7 +235,7 @@ def deserialize(serializer, stream_or_string, **mod_kwargs):
 
     .. code-block:: jinja
 
-        {% set python_object = salt.slsutil.deserialize('json',
+        {% set python_object = salt['slsutil.deserialize']('json',
             '{"foo": "Foo!"}') %}
     """
     kwargs = salt.utils.args.clean_kwargs(**mod_kwargs)
diff --git a/salt/cloud/deploy/RHEL5-git.sh b/salt/cloud/deploy/RHEL5-git.sh
deleted file mode 100644
index 068391f2c68..00000000000
--- a/salt/cloud/deploy/RHEL5-git.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-# This legacy script pre-dates the salt-bootstrap project. In most cases, the
-# bootstrap-salt.sh script is the recommended script for installing salt onto
-# a new minion. However, that may not be appropriate for all situations. This
-# script remains to help fill those needs, and to provide an example for users
-# needing to write their own deploy scripts.
-
-rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/5/x86_64/epel-release-5-4.noarch.rpm
-yum install -y salt-minion git
-rm -rf /usr/lib/python2.6/site-packages/salt*
-rm -rf /usr/bin/salt-*
-mkdir -p /root/git
-cd /root/git
-git clone git://github.com/saltstack/salt.git
-cd salt
-python26 setup.py install
-cd
-mkdir -p /etc/salt/pki
-echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
-echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
-cat > /etc/salt/minion <<EOF
-{{minion}}
-EOF
-
-/sbin/chkconfig salt-minion on
-service salt-minion start
diff --git a/salt/cloud/deploy/RHEL5.sh b/salt/cloud/deploy/RHEL5.sh
deleted file mode 100644
index e707cbbc170..00000000000
--- a/salt/cloud/deploy/RHEL5.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-# This legacy script pre-dates the salt-bootstrap project. In most cases, the
-# bootstrap-salt.sh script is the recommended script for installing salt onto
-# a new minion. However, that may not be appropriate for all situations. This
-# script remains to help fill those needs, and to provide an example for users
-# needing to write their own deploy scripts.
-
-rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/5/x86_64/epel-release-5-4.noarch.rpm
-yum install -y salt-minion
-mkdir -p /etc/salt/pki
-echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
-echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
-cat > /etc/salt/minion <<EOF
-{{minion}}
-EOF
-
-/sbin/chkconfig salt-minion on
-service salt-minion start
diff --git a/salt/cloud/deploy/RHEL6-git.sh b/salt/cloud/deploy/RHEL6-git.sh
deleted file mode 100644
index 651a1f0a8ef..00000000000
--- a/salt/cloud/deploy/RHEL6-git.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-# This legacy script pre-dates the salt-bootstrap project. In most cases, the
-# bootstrap-salt.sh script is the recommended script for installing salt onto
-# a new minion. However, that may not be appropriate for all situations. This
-# script remains to help fill those needs, and to provide an example for users
-# needing to write their own deploy scripts.
-
-rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/6/x86_64/epel-release-6-8.noarch.rpm
-yum -y install salt-minion git --enablerepo epel-testing
-rm -rf /usr/lib/python/site-packages/salt*
-rm -rf /usr/bin/salt-*
-mkdir -p /root/git
-cd /root/git
-git clone git://github.com/saltstack/salt.git
-cd salt
-python setup.py install
-cd
-mkdir -p /etc/salt/pki
-echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
-echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
-cat > /etc/salt/minion <<EOF
-{{minion}}
-EOF
-
-/sbin/chkconfig salt-minion on
-service salt-minion start
diff --git a/salt/cloud/deploy/RHEL6.sh b/salt/cloud/deploy/RHEL6.sh
deleted file mode 100644
index 752cdba933a..00000000000
--- a/salt/cloud/deploy/RHEL6.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-# This legacy script pre-dates the salt-bootstrap project. In most cases, the
-# bootstrap-salt.sh script is the recommended script for installing salt onto
-# a new minion. However, that may not be appropriate for all situations. This
-# script remains to help fill those needs, and to provide an example for users
-# needing to write their own deploy scripts.
-
-rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/6/x86_64/epel-release-6-8.noarch.rpm
-yum -y install salt-minion --enablerepo epel-testing
-mkdir -p /etc/salt/pki
-echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
-echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
-cat > /etc/salt/minion <<EOF
-{{minion}}
-EOF
-
-/sbin/chkconfig salt-minion on
-service salt-minion start
diff --git a/salt/cloud/deploy/bootstrap-salt.sh b/salt/cloud/deploy/bootstrap-salt.sh
index c632146e44a..bd29313b862 100644
--- a/salt/cloud/deploy/bootstrap-salt.sh
+++ b/salt/cloud/deploy/bootstrap-salt.sh
@@ -1,9 +1,12 @@
-#!/bin/sh -
+#!/bin/sh
 
 # WARNING: Changes to this file in the salt repo will be overwritten!
 # Please submit pull requests against the salt-bootstrap repo:
 # https://github.com/saltstack/salt-bootstrap
-
+# shellcheck disable=SC2317
+# shellcheck disable=SC2086
+# shellcheck disable=SC2329
+#
 #======================================================================================================================
 # vim: softtabstop=4 shiftwidth=4 expandtab fenc=utf-8 spell spelllang=en cc=120
 #======================================================================================================================
@@ -14,7 +17,7 @@
 #
 #          BUGS: https://github.com/saltstack/salt-bootstrap/issues
 #
-#     COPYRIGHT: (c) 2012-2022 by the SaltStack Team, see AUTHORS.rst for more
+#     COPYRIGHT: (c) 2012-2024 by the SaltStack Team, see AUTHORS.rst for more
 #                details.
 #
 #       LICENSE: Apache 2.0
@@ -23,7 +26,7 @@
 #======================================================================================================================
 set -o nounset                              # Treat unset variables as an error
 
-__ScriptVersion="2024.01.04"
+__ScriptVersion="2024.12.12"
 __ScriptName="bootstrap-salt.sh"
 
 __ScriptFullName="$0"
@@ -121,16 +124,35 @@ __check_command_exists() {
     command -v "$1" > /dev/null 2>&1
 }
 
+#---  FUNCTION  -------------------------------------------------------------------------------------------------------
+#          NAME:  __check_services_systemd_functional
+#   DESCRIPTION:  Set _SYSTEMD_FUNCTIONAL to BS_TRUE or BS_FALSE depending on whether systemd is functional (for example, a container may not have systemd)
+#----------------------------------------------------------------------------------------------------------------------
+__check_services_systemd_functional() {
+
+    # Check whether systemd is functional; the mere presence of systemctl is insufficient
+
+    if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_FALSE ]; then
+        # Already determined that systemd is not functional; nothing more to check
+        return
+    fi
+
+    if __check_command_exists systemctl; then
+        # shellcheck disable=SC2034
+        _SYSTEMD_HELP="$(systemctl --help)"
+    else
+        _SYSTEMD_FUNCTIONAL=$BS_FALSE
+        echoerror "systemctl: command not found, assume systemd not implemented, _SYSTEMD_FUNCTIONAL $_SYSTEMD_FUNCTIONAL"
+    fi
+}   # ----------  end of function __check_services_systemd_functional  ----------
+
 #---  FUNCTION  -------------------------------------------------------------------------------------------------------
 #          NAME:  __check_pip_allowed
 #   DESCRIPTION:  Simple function to let the users know that -P needs to be used.
 #----------------------------------------------------------------------------------------------------------------------
 __check_pip_allowed() {
-    if [ $# -eq 1 ]; then
-        _PIP_ALLOWED_ERROR_MSG=$1
-    else
-        _PIP_ALLOWED_ERROR_MSG="pip based installations were not allowed. Retry using '-P'"
-    fi
+
+    _PIP_ALLOWED_ERROR_MSG="pip based installations were not allowed. Retry using '-P'"
 
     if [ "$_PIP_ALLOWED" -eq $BS_FALSE ]; then
         echoerror "$_PIP_ALLOWED_ERROR_MSG"
@@ -199,6 +221,7 @@ __check_config_dir() {
 #  DESCRIPTION:  Checks the placed after the install arguments
 #----------------------------------------------------------------------------------------------------------------------
 __check_unparsed_options() {
+
     shellopts="$1"
     # grep alternative for SunOS
     if [ -f /usr/xpg4/bin/grep ]; then
@@ -228,6 +251,7 @@ _TEMP_KEYS_DIR="null"
 _SLEEP="${__DEFAULT_SLEEP}"
 _INSTALL_MASTER=$BS_FALSE
 _INSTALL_SYNDIC=$BS_FALSE
+_INSTALL_SALT_API=$BS_FALSE
 _INSTALL_MINION=$BS_TRUE
 _INSTALL_CLOUD=$BS_FALSE
 _VIRTUALENV_DIR=${BS_VIRTUALENV_DIR:-"null"}
@@ -266,19 +290,16 @@ _CUSTOM_REPO_URL="null"
 _CUSTOM_MASTER_CONFIG="null"
 _CUSTOM_MINION_CONFIG="null"
 _QUIET_GIT_INSTALLATION=$BS_FALSE
-_REPO_URL="repo.saltproject.io"
-_ONEDIR_DIR="salt"
-_ONEDIR_NIGHTLY_DIR="salt-dev/${_ONEDIR_DIR}"
+_REPO_URL="packages.broadcom.com/artifactory"
 _PY_EXE="python3"
-_INSTALL_PY="$BS_FALSE"
-_TORNADO_MAX_PY3_VERSION="5.0"
-_POST_NEON_INSTALL=$BS_FALSE
 _MINIMUM_PIP_VERSION="9.0.1"
-_MINIMUM_SETUPTOOLS_VERSION="9.1"
-_POST_NEON_PIP_INSTALL_ARGS="--prefix=/usr"
+_MINIMUM_SETUPTOOLS_VERSION="65.6.3"
+_MAXIMUM_SETUPTOOLS_VERSION="69.0"
+_PIP_INSTALL_ARGS="--prefix=/usr"
 _PIP_DOWNLOAD_ARGS=""
 _QUICK_START="$BS_FALSE"
 _AUTO_ACCEPT_MINION_KEYS="$BS_FALSE"
+_SYSTEMD_FUNCTIONAL=$BS_TRUE
 
 # Defaults for install arguments
 ITYPE="stable"
@@ -297,9 +318,9 @@ __usage() {
     - stable               Install latest stable release. This is the default
                            install type
     - stable [branch]      Install latest version on a branch. Only supported
-                           for packages available at repo.saltproject.io
+                           for packages available at packages.broadcom.com
     - stable [version]     Install a specific version. Only supported for
-                           packages available at repo.saltproject.io
+                           packages available at packages.broadcom.com
                            To pin a 3xxx minor version, specify it as 3xxx.0
     - testing              RHEL-family specific: configure EPEL testing repo
     - git                  Install from the head of the master branch
@@ -307,17 +328,11 @@ __usage() {
                            commit)
     - onedir               Install latest onedir release.
     - onedir [version]     Install a specific version. Only supported for
-                           onedir packages available at repo.saltproject.io
+                           onedir packages available at packages.broadcom.com
 
     - onedir_rc            Install latest onedir RC release.
     - onedir_rc [version]  Install a specific version. Only supported for
-                           onedir RC packages available at repo.saltproject.io
-    - old-stable           Install latest old stable release.
-    - old-stable [branch]  Install latest version on a branch. Only supported
-                           for packages available at repo.saltproject.io
-    - old-stable [version] Install a specific version. Only supported for
-                           packages available at repo.saltproject.io
-                           To pin a 3xxx minor version, specify it as 3xxx.0
+                           onedir RC packages available at packages.broadcom.com
 
   Examples:
     - ${__ScriptName}
@@ -326,16 +341,15 @@ __usage() {
     - ${__ScriptName} stable 3006.1
     - ${__ScriptName} testing
     - ${__ScriptName} git
-    - ${__ScriptName} git 2017.7
-    - ${__ScriptName} git v2017.7.2
+    - ${__ScriptName} git 3006.7
+    - ${__ScriptName} git v3006.8
+    - ${__ScriptName} git 3007.1
+    - ${__ScriptName} git v3007.1
     - ${__ScriptName} git 06f249901a2e2f1ed310d58ea3921a129f214358
     - ${__ScriptName} onedir
     - ${__ScriptName} onedir 3006
     - ${__ScriptName} onedir_rc
-    - ${__ScriptName} onedir_rc 3006
-    - ${__ScriptName} old-stable
-    - ${__ScriptName} old-stable 3005
-    - ${__ScriptName} old-stable 3005.1
+    - ${__ScriptName} onedir_rc 3008
 
 
   Options:
@@ -349,7 +363,7 @@ __usage() {
         step.
     -c  Temporary configuration directory
     -C  Only run the configuration function. Implies -F (forced overwrite).
-        To overwrite Master or Syndic configs, -M or -S, respectively, must
+        To overwrite Master, Syndic or API configs, -M, -S or -W, respectively, must
         also be specified. Salt installation will be ommitted, but some of the
         dependencies could be installed to write configuration with -j or -J.
     -d  Disables checking if Salt services are enabled to start on system boot.
@@ -401,8 +415,8 @@ __usage() {
         And automatically accept the minion key.
     -R  Specify a custom repository URL. Assumes the custom repository URL
         points to a repository that mirrors Salt packages located at
-        repo.saltproject.io. The option passed with -R replaces the
-        "repo.saltproject.io". If -R is passed, -r is also set. Currently only
+        packages.broadcom.com. The option passed with -R replaces the
+        "packages.broadcom.com". If -R is passed, -r is also set. Currently only
         works on CentOS/RHEL and Debian based distributions and macOS.
     -s  Sleep time used when waiting for daemons to start, restart and when
         checking for the services running. Default: ${__DEFAULT_SLEEP}
@@ -414,23 +428,15 @@ __usage() {
     -v  Display script version
     -V  Install Salt into virtualenv
         (only available for Ubuntu based distributions)
-    -x  Changes the Python version used to install Salt.
-        For CentOS 6 git installations python2.7 is supported.
-        Fedora git installation, CentOS 7, Ubuntu 18.04 support python3.
+    -W  Also install salt-api
+    -x  Changes the Python version used to install Salt (default: Python 3).
+        Python 2.7 is no longer supported.
     -X  Do not start daemons after installation
-    -y  Installs a different python version on host. Currently this has only been
-        tested with CentOS 6 and is considered experimental. This will install the
-        ius repo on the box if disable repo is false. This must be used in conjunction
-        with -x <pythonversion>.  For example:
-            sh bootstrap.sh -P -y -x python2.7 git v2017.7.2
-        The above will install python27 and install the git version of salt using the
-        python2.7 executable. This only works for git and pip installations.
 
 EOT
 }   # ----------  end of function __usage  ----------
 
-
-while getopts ':hvnDc:g:Gyx:k:s:MSNXCPFUKIA:i:Lp:dH:bflV:J:j:rR:aqQ' opt
+while getopts ':hvnDc:g:Gx:k:s:MSWNXCPFUKIA:i:Lp:dH:bflV:J:j:rR:aqQ' opt
 do
   case "${opt}" in
 
@@ -450,6 +456,7 @@ do
     s )  _SLEEP=$OPTARG                                 ;;
     M )  _INSTALL_MASTER=$BS_TRUE                       ;;
     S )  _INSTALL_SYNDIC=$BS_TRUE                       ;;
+    W )  _INSTALL_SALT_API=$BS_TRUE                     ;;
     N )  _INSTALL_MINION=$BS_FALSE                      ;;
     X )  _START_DAEMONS=$BS_FALSE                       ;;
     C )  _CONFIG_ONLY=$BS_TRUE                          ;;
@@ -476,7 +483,6 @@ do
     q )  _QUIET_GIT_INSTALLATION=$BS_TRUE               ;;
     Q )  _QUICK_START=$BS_TRUE                          ;;
     x )  _PY_EXE="$OPTARG"                              ;;
-    y )  _INSTALL_PY="$BS_TRUE"                         ;;
 
     \?)  echo
          echoerror "Option does not exist : $OPTARG"
@@ -488,7 +494,6 @@ do
 done
 shift $((OPTIND-1))
 
-
 # Define our logging file and pipe paths
 LOGFILE="/tmp/$( echo "$__ScriptName" | sed s/.sh/.log/g )"
 LOGPIPE="/tmp/$( echo "$__ScriptName" | sed s/.sh/.logpipe/g )"
@@ -539,8 +544,8 @@ __exit_cleanup() {
             echodebug "Cleaning up the Salt Temporary Git Repository"
             # shellcheck disable=SC2164
             cd "${__SALT_GIT_CHECKOUT_PARENT_DIR}"
-            rm -rf "${_SALT_GIT_CHECKOUT_DIR}"
-            #rm -rf "${_SALT_GIT_CHECKOUT_DIR}/deps"
+            rm -fR "${_SALT_GIT_CHECKOUT_DIR}"
+            #rm -fR "${_SALT_GIT_CHECKOUT_DIR}/deps"
         else
             echowarn "Not cleaning up the Salt Temporary git repository on request"
             echowarn "Note that if you intend to re-run this script using the git approach, you might encounter some issues"
@@ -596,7 +601,15 @@ fi
 echoinfo "Running version: ${__ScriptVersion}"
 echoinfo "Executed by: ${CALLER}"
 echoinfo "Command line: '${__ScriptFullName} ${__ScriptArgs}'"
-echowarn "Running the unstable version of ${__ScriptName}"
+
+# Defaults
+STABLE_REV="latest"
+ONEDIR_REV="latest"
+_ONEDIR_REV="latest"
+YUM_REPO_FILE="/etc/yum.repos.d/salt.repo"
+
+# check if systemd is functional
+__check_services_systemd_functional
 
 # Define installation type
 if [ "$#" -gt 0 ];then
@@ -606,13 +619,24 @@ if [ "$#" -gt 0 ];then
 fi
 
 # Check installation type
-if [ "$(echo "$ITYPE" | grep -E '(stable|testing|git|onedir|onedir_rc|old-stable)')" = "" ]; then
+if [ "$(echo "$ITYPE" | grep -E '(latest|default|stable|testing|git|onedir|onedir_rc)')" = "" ]; then
     echoerror "Installation type \"$ITYPE\" is not known..."
     exit 1
 fi
 
+## Allow GitHub Actions CI/CD to handle 'latest' and 'default' more easily
+if [ "$ITYPE" = "latest" ] || [ "$ITYPE" = "default" ]; then
+    STABLE_REV="latest"
+    ONEDIR_REV="latest"
+    _ONEDIR_REV="latest"
+    ITYPE="onedir"
+    if [ "$#" -gt 0 ];then
+        shift
+    fi
+    echodebug "using ITYPE onedir for input 'latest' or 'default', cmd args left ,$#,"
+
 # If doing a git install, check what branch/tag/sha will be checked out
-if [ "$ITYPE" = "git" ]; then
+elif [ "$ITYPE" = "git" ]; then
     if [ "$#" -eq 0 ];then
         GIT_REV="master"
     else
@@ -626,45 +650,25 @@ if [ "$ITYPE" = "git" ]; then
 # If doing stable install, check if version specified
 elif [ "$ITYPE" = "stable" ]; then
     if [ "$#" -eq 0 ];then
+        STABLE_REV="latest"
         ONEDIR_REV="latest"
         _ONEDIR_REV="latest"
         ITYPE="onedir"
     else
-        if [ "$(echo "$1" | grep -E '^(nightly|latest|3005|3006)$')" != "" ]; then
+        if [ "$(echo "$1" | grep -E '^(latest|3006|3007)$')" != "" ]; then
+            STABLE_REV="$1"
             ONEDIR_REV="$1"
             _ONEDIR_REV="$1"
             ITYPE="onedir"
             shift
-        elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{2}[5-9](\.[0-9]*)?)')" != "" ]; then
-            ONEDIR_REV="minor/$1"
+        elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+            STABLE_REV="$1"
+            ONEDIR_REV="$1"
             _ONEDIR_REV="$1"
             ITYPE="onedir"
             shift
         else
-            echo "Unknown stable version: $1 (valid: 3005, 3006, latest)"
-            exit 1
-        fi
-    fi
-
-# If doing old-stable install, check if version specified
-elif [ "$ITYPE" = "old-stable" ]; then
-    if [ "$#" -eq 0 ];then
-        ITYPE="stable"
-    else
-        if [ "$(echo "$1" | grep -E '^(3003|3004|3005)$')" != "" ]; then
-            STABLE_REV="$1"
-            ITYPE="stable"
-            shift
-        elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{3}(\.[0-9]*)?)$')" != "" ]; then
-            # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix
-            ITYPE="stable"
-            STABLE_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/')
-            if [ "$(uname)" != "Darwin" ]; then
-                STABLE_REV="archive/$STABLE_REV"
-            fi
-            shift
-        else
-            echo "Unknown old stable version: $1 (valid: 3003, 3004, 3005)"
+            echo "Unknown stable version: $1 (valid: 3006, 3007, latest), versions older than 3006 are not available"
             exit 1
         fi
     fi
@@ -672,52 +676,54 @@ elif [ "$ITYPE" = "old-stable" ]; then
 elif [ "$ITYPE" = "onedir" ]; then
     if [ "$#" -eq 0 ];then
         ONEDIR_REV="latest"
+        STABLE_REV="latest"
     else
-        if [ "$(echo "$1" | grep -E '^(nightly|latest|3005|3006)$')" != "" ]; then
+        if [ "$(echo "$1" | grep -E '^(latest|3006|3007)$')" != "" ]; then
             ONEDIR_REV="$1"
-            shift
-        elif [ "$(echo "$1" | grep -E '^(3005(\.[0-9]*)?)')" != "" ]; then
-            # Handle the 3005.0 version as 3005 archive (pin to minor) and strip the fake ".0" suffix
-            ONEDIR_REV=$(echo "$1" | sed -E 's/^(3005)\.0$/\1/')
-            ONEDIR_REV="minor/$ONEDIR_REV"
+            STABLE_REV="$1"
             shift
         elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}(\.[0-9]*)?)')" != "" ]; then
-            ONEDIR_REV="minor/$1"
+            ONEDIR_REV="$1"
+            STABLE_REV="$1"
             shift
         else
-            echo "Unknown onedir version: $1 (valid: 3005, 3006, latest, nightly.)"
+            echo "Unknown onedir version: $1 (valid: 3006, 3007, latest), versions older than 3006 are not available"
             exit 1
         fi
     fi
 
 elif [ "$ITYPE" = "onedir_rc" ]; then
-    # Change the _ONEDIR_DIR to be the location for the RC packages
-    _ONEDIR_DIR="salt_rc/salt"
+    echoerror "RC Releases are not supported at this time"
 
-    # Change ITYPE to onedir so we use the regular onedir functions
-    ITYPE="onedir"
-
-    if [ "$#" -eq 0 ];then
-        ONEDIR_REV="latest"
-    else
-        if [ "$(echo "$1" | grep -E '^(latest)$')" != "" ]; then
-            ONEDIR_REV="$1"
-            shift
-        elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}?rc[0-9]-[0-9]$)')" != "" ]; then
-            # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix
-            #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/')
-            ONEDIR_REV="minor/$1"
-            shift
-        elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}\.[0-9]?rc[0-9]$)')" != "" ]; then
-            # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix
-            #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/')
-            ONEDIR_REV="minor/$1"
-            shift
-        else
-            echo "Unknown onedir_rc version: $1 (valid: 3005-1, latest.)"
-            exit 1
-        fi
-    fi
+##    # Change the _ONEDIR_DIR to be the location for the RC packages
+##    _ONEDIR_DIR="salt_rc/salt"
+##
+##    # Change ITYPE to onedir so we use the regular onedir functions
+##    ITYPE="onedir"
+##
+##    if [ "$#" -eq 0 ];then
+##        ONEDIR_REV="latest"
+##    else
+##        if [ "$(echo "$1" | grep -E '^(latest)$')" != "" ]; then
+##            ONEDIR_REV="$1"
+##            shift
+##        elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}?rc[0-9]-[0-9]$)')" != "" ]; then
+##            # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix
+##            #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/')
+##            ## ONEDIR_REV="minor/$1" don't have minor directory anymore
+##            ONEDIR_REV="$1"
+##            shift
+##        elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}\.[0-9]?rc[0-9]$)')" != "" ]; then
+##            # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix
+##            #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/')
+##            ## ONEDIR_REV="minor/$1" don't have minor directory anymore
+##            ONEDIR_REV="$1"
+##            shift
+##        else
+##            echo "Unknown onedir_rc version: $1 (valid: 3006-8, 3007-1, latest)"
+##            exit 1
+##        fi
+##    fi
 fi
 
 # Doing a quick start, so install master
@@ -767,7 +773,7 @@ if [ "$($whoami)" != "root" ]; then
 fi
 
 # Check that we're actually installing one of minion/master/syndic
-if [ "$_INSTALL_MINION" -eq $BS_FALSE ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then
+if [ "$_INSTALL_MINION" -eq $BS_FALSE ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && [ "$_INSTALL_SALT_API" -eq $BS_FALSE ] && [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then
     echowarn "Nothing to install or configure"
     exit 1
 fi
@@ -800,6 +806,12 @@ if [ "$_CUSTOM_MINION_CONFIG" != "null" ]; then
     fi
 fi
 
+
+# Default to Python 3; Python 2 is no longer supported
+PY_PKG_VER=3
+_PY_PKG_VER="python3"
+_PY_MAJOR_VERSION="3"
+
 # Check if we're installing via a different Python executable and set major version variables
 if [ -n "$_PY_EXE" ]; then
     if [ "$(uname)" = "Darwin" ]; then
@@ -808,19 +820,21 @@ if [ -n "$_PY_EXE" ]; then
       _PY_PKG_VER=$(echo "$_PY_EXE" | sed -E "s/\\.//g")
     fi
 
-    _PY_MAJOR_VERSION=$(echo "$_PY_PKG_VER" | cut -c 7)
-    if [ "$_PY_MAJOR_VERSION" != 3 ] && [ "$_PY_MAJOR_VERSION" != 2 ]; then
-        echoerror "Detected -x option, but Python major version is not 2 or 3."
-        echoerror "The -x option must be passed as python2, python27, or python2.7 (or use the Python '3' versions of examples)."
+    TEST_PY_MAJOR_VERSION=$(echo "$_PY_PKG_VER" | cut -c 7)
+    if [ "$TEST_PY_MAJOR_VERSION" -eq 2 ]; then
+        echoerror "Python 2 is no longer supported, only Python 3"
+        exit 1
+    fi
+
+    if [ "$TEST_PY_MAJOR_VERSION" != 3 ]; then
+        echoerror "Detected -x option, but Python major version is not 3."
+        echoerror "The -x option must be passed as python3, python38, or python3.8 (use the Python '3' versions of examples)."
         exit 1
     fi
 
     if [ "$_PY_EXE" != "python3" ]; then
         echoinfo "Detected -x option. Using $_PY_EXE to install Salt."
     fi
-else
-    _PY_PKG_VER=""
-    _PY_MAJOR_VERSION=""
 fi
 
 # If the configuration directory or archive does not exist, error out
@@ -837,7 +851,7 @@ fi
 
 # -a and -V only work from git
 if [ "$ITYPE" != "git" ]; then
-    if [ $_PIP_ALL -eq $BS_TRUE ]; then
+    if [ "$_PIP_ALL" -eq $BS_TRUE ]; then
         echoerror "Pip installing all python packages with -a is only possible when installing Salt via git"
         exit 1
     fi
@@ -847,7 +861,7 @@ if [ "$ITYPE" != "git" ]; then
     fi
 fi
 
-# Set the _REPO_URL value based on if -R was passed or not. Defaults to repo.saltproject.io.
+# Set the _REPO_URL value based on whether -R was passed. Defaults to packages.broadcom.com/artifactory.
 if [ "$_CUSTOM_REPO_URL" != "null" ]; then
     _REPO_URL="$_CUSTOM_REPO_URL"
 
@@ -918,6 +932,7 @@ fi
 #  DESCRIPTION:  Retrieves a URL and writes it to a given path
 #----------------------------------------------------------------------------------------------------------------------
 __fetch_url() {
+
     # shellcheck disable=SC2086
     curl $_CURL_ARGS -L -s -f -o "$1" "$2" >/dev/null 2>&1     ||
         wget $_WGET_ARGS -q -O "$1" "$2" >/dev/null 2>&1       ||
@@ -932,6 +947,7 @@ __fetch_url() {
 #  DESCRIPTION:  Retrieves a URL, verifies its content and writes it to standard output
 #----------------------------------------------------------------------------------------------------------------------
 __fetch_verify() {
+
     fetch_verify_url="$1"
     fetch_verify_sum="$2"
     fetch_verify_size="$3"
@@ -953,6 +969,7 @@ __fetch_verify() {
 #  DESCRIPTION:  Checks if a URL exists
 #----------------------------------------------------------------------------------------------------------------------
 __check_url_exists() {
+
   _URL="$1"
   if curl --output /dev/null --silent --fail "${_URL}"; then
     return 0
@@ -1029,17 +1046,12 @@ __derive_debian_numeric_version() {
         INPUT_VERSION="$(cat /etc/debian_version)"
     fi
     if [ -z "$NUMERIC_VERSION" ]; then
-        if [ "$INPUT_VERSION" = "wheezy/sid" ]; then
-            # I've found an EC2 wheezy image which did not tell its version
-            NUMERIC_VERSION=$(__parse_version_string "7.0")
-        elif [ "$INPUT_VERSION" = "jessie/sid" ]; then
-            NUMERIC_VERSION=$(__parse_version_string "8.0")
-        elif [ "$INPUT_VERSION" = "stretch/sid" ]; then
-            NUMERIC_VERSION=$(__parse_version_string "9.0")
-        elif [ "$INPUT_VERSION" = "buster/sid" ]; then
-            NUMERIC_VERSION=$(__parse_version_string "10.0")
-        elif [ "$INPUT_VERSION" = "bullseye/sid" ]; then
+        if [ "$INPUT_VERSION" = "bullseye/sid" ]; then
             NUMERIC_VERSION=$(__parse_version_string "11.0")
+        elif [ "$INPUT_VERSION" = "bookworm/sid" ]; then
+            NUMERIC_VERSION=$(__parse_version_string "12.0")
+        elif [ "$INPUT_VERSION" = "trixie/sid" ]; then
+            NUMERIC_VERSION=$(__parse_version_string "13.0")
         else
             echowarn "Unable to parse the Debian Version (codename: '$INPUT_VERSION')"
         fi
@@ -1123,6 +1135,7 @@ __gather_linux_system_info() {
     DISTRO_VERSION=""
 
     # Let's test if the lsb_release binary is available
+    # shellcheck disable=SC2327,SC2328
     rv=$(lsb_release >/dev/null 2>&1)
 
     # shellcheck disable=SC2181
@@ -1180,7 +1193,7 @@ __gather_linux_system_info() {
         # We already have the distribution name and version
         return
     fi
-    # shellcheck disable=SC2035,SC2086
+    # shellcheck disable=SC2035,SC2086,SC2269
     for rsource in $(__sort_release_files "$(
             cd /etc && /bin/ls *[_-]release *[_-]version 2>/dev/null | env -i sort | \
             sed -e '/^redhat-release$/d' -e '/^lsb-release$/d'; \
@@ -1282,127 +1295,6 @@ __gather_linux_system_info() {
 }
 
 
-#---  FUNCTION  -------------------------------------------------------------------------------------------------------
-#          NAME:  __install_python()
-#   DESCRIPTION:  Install a different version of python on a host. Currently this has only been tested on CentOS 6 and
-#                 is considered experimental.
-#----------------------------------------------------------------------------------------------------------------------
-__install_python() {
-    if [ "$_PY_EXE" = "" ]; then
-        echoerror "Must specify -x <pythonversion> with -y to install a specific python version"
-        exit 1
-    fi
-
-    __PACKAGES="$_PY_PKG_VER"
-
-    if [ ${_DISABLE_REPOS} -eq ${BS_FALSE} ]; then
-        echoinfo "Attempting to install a repo to help provide a separate python package"
-        echoinfo "$DISTRO_NAME_L"
-        case "$DISTRO_NAME_L" in
-            "red_hat"|"centos")
-                __PYTHON_REPO_URL="https://repo.ius.io/ius-release-el${DISTRO_MAJOR_VERSION}.rpm"
-                ;;
-            *)
-                echoerror "Installing a repo to provide a python package is only supported on Redhat/CentOS.
-                If a repo is already available, please try running script with -r."
-                exit 1
-                ;;
-        esac
-
-        echoinfo "Installing IUS repo"
-        __yum_install_noinput "${__PYTHON_REPO_URL}" || return 1
-    fi
-
-    echoinfo "Installing ${__PACKAGES}"
-    __yum_install_noinput "${__PACKAGES}" || return 1
-}
-
-
-#---  FUNCTION  -------------------------------------------------------------------------------------------------------
-#          NAME:  __gather_sunos_system_info
-#   DESCRIPTION:  Discover SunOS system info
-#----------------------------------------------------------------------------------------------------------------------
-__gather_sunos_system_info() {
-    if [ -f /sbin/uname ]; then
-        DISTRO_VERSION=$(/sbin/uname -X | awk '/[kK][eE][rR][nN][eE][lL][iI][dD]/ { print $3 }')
-    fi
-
-    DISTRO_NAME=""
-    if [ -f /etc/release ]; then
-        while read -r line; do
-            [ "${DISTRO_NAME}" != "" ] && break
-            case "$line" in
-                *OpenIndiana*oi_[0-9]*)
-                    DISTRO_NAME="OpenIndiana"
-                    DISTRO_VERSION=$(echo "$line" | sed -nE "s/OpenIndiana(.*)oi_([[:digit:]]+)(.*)/\\2/p")
-                    break
-                    ;;
-                *OpenSolaris*snv_[0-9]*)
-                    DISTRO_NAME="OpenSolaris"
-                    DISTRO_VERSION=$(echo "$line" | sed -nE "s/OpenSolaris(.*)snv_([[:digit:]]+)(.*)/\\2/p")
-                    break
-                    ;;
-                *Oracle*Solaris*[0-9]*)
-                    DISTRO_NAME="Oracle Solaris"
-                    DISTRO_VERSION=$(echo "$line" | sed -nE "s/(Oracle Solaris) ([[:digit:]]+)(.*)/\\2/p")
-                    break
-                    ;;
-                *Solaris*)
-                    DISTRO_NAME="Solaris"
-                    # Let's make sure we not actually on a Joyent's SmartOS VM since some releases
-                    # don't have SmartOS in `/etc/release`, only `Solaris`
-                    if uname -v | grep joyent >/dev/null 2>&1; then
-                        DISTRO_NAME="SmartOS"
-                    fi
-                    break
-                    ;;
-                *NexentaCore*)
-                    DISTRO_NAME="Nexenta Core"
-                    break
-                    ;;
-                *SmartOS*)
-                    DISTRO_NAME="SmartOS"
-                    break
-                    ;;
-                *OmniOS*)
-                    DISTRO_NAME="OmniOS"
-                    DISTRO_VERSION=$(echo "$line" | awk '{print $3}')
-                    _SIMPLIFY_VERSION=$BS_FALSE
-                    break
-                    ;;
-            esac
-        done < /etc/release
-    fi
-
-    if [ "${DISTRO_NAME}" = "" ]; then
-        DISTRO_NAME="Solaris"
-        DISTRO_VERSION=$(
-            echo "${OS_VERSION}" |
-            sed -e 's;^4\.;1.;' \
-                -e 's;^5\.\([0-6]\)[^0-9]*$;2.\1;' \
-                -e 's;^5\.\([0-9][0-9]*\).*;\1;'
-        )
-    fi
-
-    if [ "${DISTRO_NAME}" = "SmartOS" ]; then
-        VIRTUAL_TYPE="smartmachine"
-        if [ "$(zonename)" = "global" ]; then
-            VIRTUAL_TYPE="global"
-        fi
-    fi
-}
-
-
-#---  FUNCTION  -------------------------------------------------------------------------------------------------------
-#          NAME:  __gather_bsd_system_info
-#   DESCRIPTION:  Discover OpenBSD, NetBSD and FreeBSD systems information
-#----------------------------------------------------------------------------------------------------------------------
-__gather_bsd_system_info() {
-    DISTRO_NAME=${OS_NAME}
-    DISTRO_VERSION=$(echo "${OS_VERSION}" | sed -e 's;[()];;' -e 's/-.*$//')
-}
-
-
 #---  FUNCTION  -------------------------------------------------------------------------------------------------------
 #          NAME:  __gather_osx_system_info
 #   DESCRIPTION:  Discover MacOS X
@@ -1422,12 +1314,6 @@ __gather_system_info() {
         linux )
             __gather_linux_system_info
             ;;
-        sunos )
-            __gather_sunos_system_info
-            ;;
-        openbsd|freebsd|netbsd )
-            __gather_bsd_system_info
-            ;;
         darwin )
             __gather_osx_system_info
             ;;
@@ -1448,21 +1334,23 @@ __gather_system_info() {
 #----------------------------------------------------------------------------------------------------------------------
 # shellcheck disable=SC2034
 __ubuntu_derivatives_translation() {
-    UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os|neon|pop)"
+    UBUNTU_DERIVATIVES="(trisquel|linuxmint|elementary_os|pop|neon)"
     # Mappings
-    trisquel_6_ubuntu_base="12.04"
-    linuxmint_13_ubuntu_base="12.04"
-    linuxmint_17_ubuntu_base="14.04"
-    linuxmint_18_ubuntu_base="16.04"
-    linuxmint_19_ubuntu_base="18.04"
-    linuxmint_20_ubuntu_base="20.04"
-    linaro_12_ubuntu_base="12.04"
-    elementary_os_02_ubuntu_base="12.04"
-    neon_16_ubuntu_base="16.04"
-    neon_18_ubuntu_base="18.04"
+    trisquel_10_ubuntu_base="20.04"
+    trisquel_11_ubuntu_base="22.04"
+    trisquel_12_ubuntu_base="24.04"
     neon_20_ubuntu_base="20.04"
     neon_22_ubuntu_base="22.04"
+    neon_24_ubuntu_base="24.04"
+    linuxmint_20_ubuntu_base="20.04"
+    linuxmint_21_ubuntu_base="22.04"
+    linuxmint_22_ubuntu_base="24.04"
+    elementary_os_06_ubuntu_base="20.04"
+    elementary_os_07_ubuntu_base="22.04"
+    elementary_os_08_ubuntu_base="24.04"
+    pop_20_ubuntu_base="22.04"
     pop_22_ubuntu_base="22.04"
+    pop_24_ubuntu_base="24.04"
 
     # Translate Ubuntu derivatives to their base Ubuntu version
     match=$(echo "$DISTRO_NAME_L" | grep -E ${UBUNTU_DERIVATIVES})
@@ -1505,42 +1393,24 @@ __check_dpkg_architecture() {
         return 1
     fi
 
-    __REPO_ARCH="$DPKG_ARCHITECTURE"
-    __REPO_ARCH_DEB='deb [signed-by=/usr/share/keyrings/salt-archive-keyring.gpg]'
     __return_code=0
 
     case $DPKG_ARCHITECTURE in
         "i386")
-            error_msg="$_REPO_URL likely doesn't have all required 32-bit packages for $DISTRO_NAME $DISTRO_MAJOR_VERSION."
+            error_msg="$_REPO_URL likely doesn't have required 32-bit packages for $DISTRO_NAME $DISTRO_MAJOR_VERSION."
             # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location
-            __REPO_ARCH="amd64"
+            __return_code=1
             ;;
         "amd64")
             error_msg=""
             ;;
         "arm64")
-            if [ "$_CUSTOM_REPO_URL" != "null" ]; then
-                warn_msg="Support for arm64 is experimental, make sure the custom repository used has the expected structure and contents."
-            else
-                # Saltstack official repository has arm64 metadata beginning with Debian 10,
-                # use amd64 repositories on arm64 for anything older, since all pkgs are arch-independent
-                if [ "$DISTRO_NAME_L" = "debian" ] && [ "$DISTRO_MAJOR_VERSION" -lt 10 ]; then
-                  __REPO_ARCH="amd64"
-                else
-                  __REPO_ARCH="arm64"
-                fi
-                __REPO_ARCH_DEB="deb [signed-by=/usr/share/keyrings/salt-archive-keyring.gpg arch=$__REPO_ARCH]"
-                warn_msg="Support for arm64 packages is experimental and might rely on architecture-independent packages from the amd64 repository."
-            fi
+            # Saltstack official repository has full arm64 support since 3006
             error_msg=""
             ;;
         "armhf")
-            if [ "$DISTRO_NAME_L" = "ubuntu" ] || [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
-                error_msg="Support for armhf packages at $_REPO_URL is limited to Debian/Raspbian 8 platforms."
-                __return_code=1
-            else
-                error_msg=""
-            fi
+            error_msg="$_REPO_URL doesn't have packages for your system architecture: $DPKG_ARCHITECTURE."
+            __return_code=1
             ;;
         *)
             error_msg="$_REPO_URL doesn't have packages for your system architecture: $DPKG_ARCHITECTURE."
@@ -1556,10 +1426,10 @@ __check_dpkg_architecture() {
     if [ "${error_msg}" != "" ]; then
         echoerror "${error_msg}"
         if [ "$ITYPE" != "git" ]; then
-            echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v2017.7.2."
+            echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v3006.6."
             echoerror "It may be necessary to use git installation mode with pip and disable the SaltStack apt repository."
             echoerror "For example:"
-            echoerror "    sh ${__ScriptName} -r -P git v2017.7.2"
+            echoerror "    sh ${__ScriptName} -r -P git v3006.6"
         fi
     fi
 
@@ -1611,8 +1481,14 @@ __ubuntu_codename_translation() {
         "22")
             DISTRO_CODENAME="jammy"
             ;;
+        "23")
+            DISTRO_CODENAME="lunar"
+            ;;
+        "24")
+            DISTRO_CODENAME="noble"
+            ;;
         *)
-            DISTRO_CODENAME="trusty"
+            DISTRO_CODENAME="noble"
             ;;
     esac
 }
@@ -1631,20 +1507,21 @@ __debian_derivatives_translation() {
 
     DEBIAN_DERIVATIVES="(cumulus|devuan|kali|linuxmint|raspbian|bunsenlabs|turnkey)"
     # Mappings
-    cumulus_2_debian_base="7.0"
-    cumulus_3_debian_base="8.0"
-    cumulus_4_debian_base="10.0"
-    devuan_1_debian_base="8.0"
-    devuan_2_debian_base="9.0"
+    cumulus_5_debian_base="11.0"
+    cumulus_6_debian_base="12.0"
+    devuan_4_debian_base="11.0"
+    devuan_5_debian_base="12.0"
     kali_1_debian_base="7.0"
     kali_2021_debian_base="10.0"
-    linuxmint_1_debian_base="8.0"
-    raspbian_8_debian_base="8.0"
-    raspbian_9_debian_base="9.0"
-    raspbian_10_debian_base="10.0"
+    linuxmint_4_debian_base="11.0"
+    linuxmint_5_debian_base="12.0"
     raspbian_11_debian_base="11.0"
+    raspbian_12_debian_base="12.0"
     bunsenlabs_9_debian_base="9.0"
-    turnkey_9_debian_base="9.0"
+    bunsenlabs_11_debian_base="11.0"
+    bunsenlabs_12_debian_base="12.0"
+    turnkey_11_debian_base="11.0"
+    turnkey_12_debian_base="12.0"
 
     # Translate Debian derivatives to their base Debian version
     match=$(echo "$DISTRO_NAME_L" | grep -E ${DEBIAN_DERIVATIVES})
@@ -1712,14 +1589,9 @@ __debian_codename_translation() {
             ;;
         "12")
             DISTRO_CODENAME="bookworm"
-            # FIXME - TEMPORARY
-            # use bullseye packages until bookworm packages are available
-            DISTRO_CODENAME="bullseye"
-            DISTRO_MAJOR_VERSION=11
-            rv=11
             ;;
         *)
-            DISTRO_CODENAME="stretch"
+            DISTRO_CODENAME="bookworm"
             ;;
     esac
 }
@@ -1732,8 +1604,8 @@ __debian_codename_translation() {
 __check_end_of_life_versions() {
     case "${DISTRO_NAME_L}" in
         debian)
-            # Debian versions below 9 are not supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 9 ]; then
+            # Debian versions below 11 are not supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    https://wiki.debian.org/DebianReleases"
@@ -1744,18 +1616,18 @@ __check_end_of_life_versions() {
         ubuntu)
             # Ubuntu versions not supported
             #
-            #  < 16.04
-            #  = 16.10
-            #  = 17.04, 17.10
-            #  = 18.10
-            #  = 19.04, 19.10
+            #  < 20.04
             #  = 20.10
-            if [ "$DISTRO_MAJOR_VERSION" -lt 16 ] || \
-                [ "$DISTRO_MAJOR_VERSION" -eq 17 ] || \
-                [ "$DISTRO_MAJOR_VERSION" -eq 19 ] || \
-                { [ "$DISTRO_MAJOR_VERSION" -eq 16 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
-                { [ "$DISTRO_MAJOR_VERSION" -eq 18 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
-                { [ "$DISTRO_MAJOR_VERSION" -eq 20 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; }; then
+            #  = 21.04, 21.10
+            #  = 22.10
+            #  = 23.04, 23.10
+            if [ "$DISTRO_MAJOR_VERSION" -lt 20 ] || \
+                { [ "$DISTRO_MAJOR_VERSION" -eq 20 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
+                { [ "$DISTRO_MAJOR_VERSION" -eq 21 ] && [ "$DISTRO_MINOR_VERSION" -eq 04 ]; } || \
+                { [ "$DISTRO_MAJOR_VERSION" -eq 21 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
+                { [ "$DISTRO_MAJOR_VERSION" -eq 22 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
+                { [ "$DISTRO_MAJOR_VERSION" -eq 23 ] && [ "$DISTRO_MINOR_VERSION" -eq 04 ]; } || \
+                { [ "$DISTRO_MAJOR_VERSION" -eq 23 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; }; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    https://wiki.ubuntu.com/Releases"
@@ -1799,8 +1671,8 @@ __check_end_of_life_versions() {
             ;;
 
         fedora)
-            # Fedora lower than 33 are no longer supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 33 ]; then
+            # Fedora lower than 38 are no longer supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 39 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    https://fedoraproject.org/wiki/Releases"
@@ -1809,8 +1681,8 @@ __check_end_of_life_versions() {
             ;;
 
         centos)
-            # CentOS versions lower than 7 are no longer supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 7 ]; then
+            # CentOS versions lower than 8 are no longer supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    http://wiki.centos.org/Download"
@@ -1819,8 +1691,8 @@ __check_end_of_life_versions() {
             ;;
 
         red_hat*linux)
-            # Red Hat (Enterprise) Linux versions lower than 7 are no longer supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 7 ]; then
+            # Red Hat (Enterprise) Linux versions lower than 8 are no longer supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    https://access.redhat.com/support/policy/updates/errata/"
@@ -1829,8 +1701,8 @@ __check_end_of_life_versions() {
             ;;
 
         oracle*linux)
-            # Oracle Linux versions lower than 7 are no longer supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 7 ]; then
+            # Oracle Linux versions lower than 8 are no longer supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    http://www.oracle.com/us/support/library/elsp-lifetime-069338.pdf"
@@ -1839,8 +1711,8 @@ __check_end_of_life_versions() {
             ;;
 
         scientific*linux)
-            # Scientific Linux versions lower than 7 are no longer supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 7 ]; then
+            # Scientific Linux versions lower than 8 are no longer supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    https://www.scientificlinux.org/downloads/sl-versions/"
@@ -1849,8 +1721,8 @@ __check_end_of_life_versions() {
             ;;
 
         cloud*linux)
-            # Cloud Linux versions lower than 7 are no longer supported
-            if [ "$DISTRO_MAJOR_VERSION" -lt 7 ]; then
+            # Cloud Linux versions lower than 8 are no longer supported
+            if [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
                 echoerror "End of life distributions are not supported."
                 echoerror "Please consider upgrading to the next stable. See:"
                 echoerror "    https://docs.cloudlinux.com/index.html?cloudlinux_life-cycle.html"
@@ -1869,20 +1741,11 @@ __check_end_of_life_versions() {
             fi
             ;;
 
-        freebsd)
-            # FreeBSD versions lower than 11 are EOL
-            if [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then
-                echoerror "Versions lower than FreeBSD 11 are EOL and no longer supported."
-                exit 1
-            fi
-            ;;
-
         *)
             ;;
     esac
 }
 
-
 __gather_system_info
 
 echo
@@ -1957,8 +1820,16 @@ if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ]; then
     fi
 fi
 
+if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+    if [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then
+        echoinfo "Installing salt api"
+    else
+        echoinfo "Configuring salt api"
+    fi
+fi
+
 if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ] && [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then
-    echoinfo "Installing salt-cloud and required python-libcloud package"
+    echoinfo "Installing salt-cloud and required python3-libcloud package"
 fi
 
 if [ $_START_DAEMONS -eq $BS_FALSE ]; then
@@ -2002,16 +1873,14 @@ fi
 if [ "$ITYPE" = "git" ]; then
 
     if [ "${GIT_REV}" = "master" ]; then
-        _POST_NEON_INSTALL=$BS_TRUE
         __TAG_REGEX_MATCH="MATCH"
     else
         case ${OS_NAME_L} in
-            openbsd|freebsd|netbsd|darwin )
+            darwin )
                 __NEW_VS_TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed -E 's/^(v?3[0-9]{3}(\.[0-9]{1,2})?).*$/MATCH/')
                 if [ "$__NEW_VS_TAG_REGEX_MATCH" = "MATCH" ]; then
-                    _POST_NEON_INSTALL=$BS_TRUE
                     __TAG_REGEX_MATCH="${__NEW_VS_TAG_REGEX_MATCH}"
-                    echodebug "Post Neon Tag Regex Match On: ${GIT_REV}"
+                    echodebug "Tag Regex Match On: ${GIT_REV}"
                 else
                     __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed -E 's/^(v?[0-9]{1,4}\.[0-9]{1,2})(\.[0-9]{1,2})?.*$/MATCH/')
                     echodebug "Pre Neon Tag Regex Match On: ${GIT_REV}"
@@ -2020,9 +1889,8 @@ if [ "$ITYPE" = "git" ]; then
             * )
                 __NEW_VS_TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed 's/^.*\(v\?3[[:digit:]]\{3\}\(\.[[:digit:]]\{1,2\}\)\?\).*$/MATCH/')
                 if [ "$__NEW_VS_TAG_REGEX_MATCH" = "MATCH" ]; then
-                    _POST_NEON_INSTALL=$BS_TRUE
                     __TAG_REGEX_MATCH="${__NEW_VS_TAG_REGEX_MATCH}"
-                    echodebug "Post Neon Tag Regex Match On: ${GIT_REV}"
+                    echodebug "Tag Regex Match On: ${GIT_REV}"
                 else
                     __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed 's/^.*\(v\?[[:digit:]]\{1,4\}\.[[:digit:]]\{1,2\}\)\(\.[[:digit:]]\{1,2\}\)\?.*$/MATCH/')
                     echodebug "Pre Neon Tag Regex Match On: ${GIT_REV}"
@@ -2031,18 +1899,16 @@ if [ "$ITYPE" = "git" ]; then
         esac
     fi
 
-    if [ "$_POST_NEON_INSTALL" -eq $BS_TRUE ]; then
-        echo
-        echowarn "Post Neon git based installations will always install salt"
-        echowarn "and its dependencies using pip which will be upgraded to"
-        echowarn "at least v${_MINIMUM_PIP_VERSION}, and, in case the setuptools version is also"
-        echowarn "too old, it will be upgraded to at least v${_MINIMUM_SETUPTOOLS_VERSION}"
-        echo
-        echowarn "You have 10 seconds to cancel and stop the bootstrap process..."
-        echo
-        sleep 10
-        _PIP_ALLOWED=$BS_TRUE
-    fi
+    echo
+    echowarn "git based installations will always install salt"
+    echowarn "and its dependencies using pip which will be upgraded to"
+    echowarn "at least v${_MINIMUM_PIP_VERSION}, and, in case the setuptools version is also"
+    echowarn "too old, it will be upgraded to at least v${_MINIMUM_SETUPTOOLS_VERSION} and less than v${_MAXIMUM_SETUPTOOLS_VERSION}"
+    echo
+    echowarn "You have 10 seconds to cancel and stop the bootstrap process..."
+    echo
+    sleep 10
+    _PIP_ALLOWED=$BS_TRUE
 fi
 
 
@@ -2071,15 +1937,21 @@ __function_defined() {
 #                 process is finished so the script doesn't exit on a locked proc.
 #----------------------------------------------------------------------------------------------------------------------
 __wait_for_apt(){
+
     # Timeout set at 15 minutes
     WAIT_TIMEOUT=900
 
+    ## Sync the system clock from the hardware clock; clock skew can make apt fail
+    if [ -f /usr/sbin/hwclock ]; then
+        /usr/sbin/hwclock -s
+    fi
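+    ## Illustrative note (assumption, not from this patch): "hwclock -s" sets
+    ## the system clock from the hardware clock; when a VM or container clock
+    ## lags the repository timestamps, apt can fail with "Release file ... is
+    ## not valid yet", and syncing the clocks first avoids that.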
+
     # Run our passed in apt command
     "${@}" 2>"$APT_ERR"
     APT_RETURN=$?
 
     # Make sure we're not waiting on a lock
-    while [ $APT_RETURN -ne 0 ] && grep -q '^E: Could not get lock' "$APT_ERR"; do
+    while [ "$APT_RETURN" -ne 0 ] && grep -q '^E: Could not get lock' "$APT_ERR"; do
         echoinfo "Aware of the lock. Patiently waiting $WAIT_TIMEOUT more seconds..."
         sleep 1
         WAIT_TIMEOUT=$((WAIT_TIMEOUT - 1))
@@ -2103,6 +1975,7 @@ __wait_for_apt(){
 #    PARAMETERS:  packages
 #----------------------------------------------------------------------------------------------------------------------
 __apt_get_install_noinput() {
+
     __wait_for_apt apt-get install -y -o DPkg::Options::=--force-confold "${@}"; return $?
 }   # ----------  end of function __apt_get_install_noinput  ----------
 
@@ -2112,6 +1985,7 @@ __apt_get_install_noinput() {
 #   DESCRIPTION:  (DRY) apt-get upgrade with noinput options
 #----------------------------------------------------------------------------------------------------------------------
 __apt_get_upgrade_noinput() {
+
     __wait_for_apt apt-get upgrade -y -o DPkg::Options::=--force-confold; return $?
 }   # ----------  end of function __apt_get_upgrade_noinput  ----------
 
@@ -2142,12 +2016,13 @@ __temp_gpg_pub() {
 #    PARAMETERS:  url
 #----------------------------------------------------------------------------------------------------------------------
 __apt_key_fetch() {
+
     url=$1
 
     tempfile="$(__temp_gpg_pub)"
-
     __fetch_url "$tempfile" "$url" || return 1
-    cp -f "$tempfile" /usr/share/keyrings/salt-archive-keyring.gpg && chmod 644 /usr/share/keyrings/salt-archive-keyring.gpg || return 1
+    mkdir -p /etc/apt/keyrings
+    cp -f "$tempfile" /etc/apt/keyrings/salt-archive-keyring.pgp && chmod 644 /etc/apt/keyrings/salt-archive-keyring.pgp || return 1
     rm -f "$tempfile"
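+    # Illustrative (assumption): the salt.sources file fetched elsewhere in
+    # this script is expected to point at this keyring, e.g.:
+    #   Signed-By: /etc/apt/keyrings/salt-archive-keyring.pgp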
 
     return 0
@@ -2160,6 +2035,7 @@ __apt_key_fetch() {
 #    PARAMETERS:  url
 #----------------------------------------------------------------------------------------------------------------------
 __rpm_import_gpg() {
+
     url=$1
 
     tempfile="$(__temp_gpg_pub)"
@@ -2217,6 +2093,7 @@ __tdnf_install_noinput() {
 #   DESCRIPTION:  (DRY) Helper function to clone and checkout salt to a
 #                 specific revision.
 #----------------------------------------------------------------------------------------------------------------------
+# shellcheck disable=SC2120
 __git_clone_and_checkout() {
 
     echodebug "Installed git version: $(git --version | awk '{ print $3 }')"
@@ -2225,6 +2102,12 @@ __git_clone_and_checkout() {
         export GIT_SSL_NO_VERIFY=1
     fi
 
+    if [ "$(echo "$GIT_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        GIT_REV_ADJ="$GIT_REV.x"  # branches are 3006.x or 3007.x
+    else
+        GIT_REV_ADJ="$GIT_REV"
+    fi
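+    # Derived from the check above: GIT_REV=3006 becomes GIT_REV_ADJ=3006.x
+    # (the release branch name), while tags such as v3006.8 and branches such
+    # as master pass through unchanged.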
+
     __SALT_GIT_CHECKOUT_PARENT_DIR=$(dirname "${_SALT_GIT_CHECKOUT_DIR}" 2>/dev/null)
     __SALT_GIT_CHECKOUT_PARENT_DIR="${__SALT_GIT_CHECKOUT_PARENT_DIR:-/tmp/git}"
     __SALT_CHECKOUT_REPONAME="$(basename "${_SALT_GIT_CHECKOUT_DIR}" 2>/dev/null)"
@@ -2253,15 +2136,15 @@ __git_clone_and_checkout() {
             git fetch --tags upstream
         fi
 
-        echodebug "Hard reseting the cloned repository to ${GIT_REV}"
-        git reset --hard "$GIT_REV" || return 1
+        echodebug "Hard reseting the cloned repository to ${GIT_REV_ADJ}"
+        git reset --hard "$GIT_REV_ADJ" || return 1
 
-        # Just calling `git reset --hard $GIT_REV` on a branch name that has
+        # Just calling `git reset --hard $GIT_REV_ADJ` on a branch name that has
         # already been checked out will not update that branch to the upstream
         # HEAD; instead it will simply reset to itself.  Check the ref to see
         # if it is a branch name, check out the branch, and pull in the
         # changes.
-        if git branch -a | grep -q "${GIT_REV}"; then
+        if git branch -a | grep -q "${GIT_REV_ADJ}"; then
             echodebug "Rebasing the cloned repository branch"
             git pull --rebase || return 1
         fi
@@ -2283,11 +2166,16 @@ __git_clone_and_checkout() {
             # cloning we need actually works
             if [ "$(git clone 2>&1 | grep 'single-branch')" != "" ]; then
                 # The "--single-branch" option is supported, attempt shallow cloning
-                echoinfo "Attempting to shallow clone $GIT_REV from Salt's repository ${_SALT_REPO_URL}"
-                if git clone --depth 1 --branch "$GIT_REV" "$_SALT_REPO_URL" "$__SALT_CHECKOUT_REPONAME"; then
+                echoinfo "Attempting to shallow clone $GIT_REV_ADJ from Salt's repository ${_SALT_REPO_URL}"
+                ## Shallow cloning is resulting in the wrong version of Salt, even with a depth of 5
+                ## getting 3007.0+0na.246d066 when it should be 3007.1+410.g246d066457, disabling for now
+                ## if git clone --depth 1 --branch "$GIT_REV_ADJ" "$_SALT_REPO_URL" "$__SALT_CHECKOUT_REPONAME"; then
+                echodebug "git command, git clone --branch $GIT_REV_ADJ $_SALT_REPO_URL $__SALT_CHECKOUT_REPONAME"
+                if git clone --branch "$GIT_REV_ADJ" "$_SALT_REPO_URL" "$__SALT_CHECKOUT_REPONAME"; then
                     # shellcheck disable=SC2164
                     cd "${_SALT_GIT_CHECKOUT_DIR}"
                     __SHALLOW_CLONE=$BS_TRUE
+                    echoinfo  "shallow path (disabled shallow) git cloned $GIT_REV_ADJ, version $(python3 salt/version.py)"
                 else
                     # Shallow clone above failed(missing upstream tags???), let's resume the old behaviour.
                     echowarn "Failed to shallow clone."
@@ -2301,10 +2189,13 @@ __git_clone_and_checkout() {
         fi
 
         if [ "$__SHALLOW_CLONE" -eq $BS_FALSE ]; then
+            echodebug "shallow clone false, BS_FALSE $BS_FALSE, git clone $_SALT_REPO_URL $__SALT_CHECKOUT_REPONAME"
             git clone "$_SALT_REPO_URL" "$__SALT_CHECKOUT_REPONAME" || return 1
             # shellcheck disable=SC2164
             cd "${_SALT_GIT_CHECKOUT_DIR}"
 
+            echoinfo  "git cloned $GIT_REV_ADJ, version $(python3 salt/version.py)"
+
             if ! echo "$_SALT_REPO_URL" | grep -q -F -w "${_SALTSTACK_REPO_URL#*://}"; then
                 # We need to add the saltstack repository as a remote and fetch tags for proper versioning
                 echoinfo "Adding SaltStack's Salt repository as a remote"
@@ -2313,14 +2204,14 @@ __git_clone_and_checkout() {
                 echodebug "Fetching upstream (SaltStack's Salt repository) git tags"
                 git fetch --tags upstream || return 1
 
-                # Check if GIT_REV is a remote branch or just a commit hash
-                if git branch -r | grep -q -F -w "origin/$GIT_REV"; then
-                    GIT_REV="origin/$GIT_REV"
+                # Check if GIT_REV_ADJ is a remote branch or just a commit hash
+                if git branch -r | grep -q -F -w "origin/$GIT_REV_ADJ"; then
+                    GIT_REV_ADJ="origin/$GIT_REV_ADJ"
                 fi
             fi
 
-            echodebug "Checking out $GIT_REV"
-            git checkout "$GIT_REV" || return 1
+            echodebug "Checking out $GIT_REV_ADJ"
+            git checkout "$GIT_REV_ADJ" || return 1
         fi
 
     fi
@@ -2397,7 +2288,7 @@ __movefile() {
         exit 1
     fi
 
-    if [ $_KEEP_TEMP_FILES -eq $BS_TRUE ]; then
+    if [ "$_KEEP_TEMP_FILES" -eq $BS_TRUE ]; then
         # We're being told not to move files, instead copy them so we can keep
         # them around
         echodebug "Since BS_KEEP_TEMP_FILES=1 we're copying files instead of moving them"
@@ -2514,14 +2405,17 @@ __overwriteconfig() {
     if [ -n "$_PY_EXE" ]; then
         good_python="$_PY_EXE"
     # If python does not have yaml installed we're on Arch and should use python2
+    # but Python 2 is no longer supported, so error out instead
     elif python -c "import yaml" 2> /dev/null; then
-        good_python=python
+        good_python=python  # assume python is python 3 on Arch
     else
-        good_python=python2
+        ## good_python=python2
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
     # Convert json string to a yaml string and write it to config file. Output is dumped into tempfile.
-    "$good_python" -c "import json; import yaml; jsn=json.loads('$json'); yml=yaml.safe_dump(jsn, line_break='\\n', default_flow_style=False); config_file=open('$target', 'w'); config_file.write(yml); config_file.close();" 2>$tempfile
+    "$good_python" -c "import json; import yaml; jsn=json.loads('$json'); yml=yaml.safe_dump(jsn, line_break='\\n', default_flow_style=False, sort_keys=False); config_file=open('$target', 'w'); config_file.write(yml); config_file.close();" 2>"$tempfile"
 
     # No python errors output to the tempfile
     if [ ! -s "$tempfile" ]; then
@@ -2546,6 +2440,7 @@ __overwriteconfig() {
 #    PARAMETERS:  servicename
 #----------------------------------------------------------------------------------------------------------------------
 __check_services_systemd() {
+
     if [ $# -eq 0 ]; then
         echoerror "You need to pass a service name to check!"
         exit 1
@@ -2553,6 +2448,23 @@ __check_services_systemd() {
         echoerror "You need to pass a service name to check as the single argument to the function"
     fi
 
+    # Check whether systemd is functional; the mere presence of systemctl is insufficient
+
+    if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_FALSE ]; then
+        # systemd has already been determined to be non-functional; return failure
+        return 1
+    fi
+
+    _SYSTEMD_ACTIVE=$(/bin/systemctl daemon-reload 2>&1 | grep 'System has not been booted with systemd')
+    echodebug "__check_services_systemd _SYSTEMD_ACTIVE result ,$_SYSTEMD_ACTIVE,"
+    if [ -n "$_SYSTEMD_ACTIVE" ]; then
+        _SYSTEMD_FUNCTIONAL=$BS_FALSE
+        echodebug "systemd is not functional, despite systemctl being present, setting _SYSTEMD_FUNCTIONAL false, $_SYSTEMD_FUNCTIONAL"
+        return 1
+    else
+        echodebug "systemd is functional, _SYSTEMD_FUNCTIONAL true, $_SYSTEMD_FUNCTIONAL"
+    fi
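+    # Illustrative (assumed) message the grep above matches, e.g. inside a
+    # container without a systemd init:
+    #   System has not been booted with systemd (PID 1). Can't operate.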
+
     servicename=$1
     echodebug "Checking if service ${servicename} is enabled"
 
@@ -2572,6 +2484,7 @@ __check_services_systemd() {
 #    PARAMETERS:  servicename
 #----------------------------------------------------------------------------------------------------------------------
 __check_services_upstart() {
+
     if [ $# -eq 0 ]; then
         echoerror "You need to pass a service name to check!"
         exit 1
@@ -2599,6 +2512,7 @@ __check_services_upstart() {
 #    PARAMETERS:  servicename
 #----------------------------------------------------------------------------------------------------------------------
 __check_services_sysvinit() {
+
     if [ $# -eq 0 ]; then
         echoerror "You need to pass a service name to check!"
         exit 1
@@ -2625,6 +2539,7 @@ __check_services_sysvinit() {
 #    PARAMETERS:  servicename
 #----------------------------------------------------------------------------------------------------------------------
 __check_services_debian() {
+
     if [ $# -eq 0 ]; then
         echoerror "You need to pass a service name to check!"
         exit 1
@@ -2646,38 +2561,13 @@ __check_services_debian() {
 }   # ----------  end of function __check_services_debian  ----------
 
 
-#---  FUNCTION  -------------------------------------------------------------------------------------------------------
-#          NAME:  __check_services_openbsd
-#   DESCRIPTION:  Return 0 or 1 in case the service is enabled or not
-#    PARAMETERS:  servicename
-#----------------------------------------------------------------------------------------------------------------------
-__check_services_openbsd() {
-    if [ $# -eq 0 ]; then
-        echoerror "You need to pass a service name to check!"
-        exit 1
-    elif [ $# -ne 1 ]; then
-        echoerror "You need to pass a service name to check as the single argument to the function"
-    fi
-
-    servicename=$1
-    echodebug "Checking if service ${servicename} is enabled"
-
-    # shellcheck disable=SC2086,SC2046,SC2144
-    if rcctl get ${servicename} status; then
-        echodebug "Service ${servicename} is enabled"
-        return 0
-    else
-        echodebug "Service ${servicename} is NOT enabled"
-        return 1
-    fi
-}   # ----------  end of function __check_services_openbsd  ----------
-
 #---  FUNCTION  -------------------------------------------------------------------------------------------------------
 #          NAME:  __check_services_openrc
 #   DESCRIPTION:  Return 0 or 1 in case the service is enabled or not
 #    PARAMETERS:  servicename
 #----------------------------------------------------------------------------------------------------------------------
 __check_services_openrc() {
+
     if [ $# -eq 0 ]; then
         echoerror "You need to pass a service name to check!"
         exit 1
@@ -2704,9 +2594,10 @@ __check_services_openrc() {
 #   DESCRIPTION:  Return 0 or 1 depending on successful creation of virtualenv
 #----------------------------------------------------------------------------------------------------------------------
 __create_virtualenv() {
+
     if [ ! -d "$_VIRTUALENV_DIR" ]; then
         echoinfo "Creating virtualenv ${_VIRTUALENV_DIR}"
-        if [ $_PIP_ALL -eq $BS_TRUE ]; then
+        if [ "$_PIP_ALL" -eq $BS_TRUE ]; then
             virtualenv --no-site-packages "${_VIRTUALENV_DIR}" || return 1
         else
             virtualenv --system-site-packages "${_VIRTUALENV_DIR}" || return 1
@@ -2721,6 +2612,7 @@ __create_virtualenv() {
 #   DESCRIPTION:  Return 0 or 1 depending on successful activation of virtualenv
 #----------------------------------------------------------------------------------------------------------------------
 __activate_virtualenv() {
+
     set +o nounset
     # Is virtualenv empty
     if [ -z "$_VIRTUALENV_DIR" ]; then
@@ -2741,13 +2633,14 @@ __activate_virtualenv() {
 #----------------------------------------------------------------------------------------------------------------------
 
 __install_pip_pkgs() {
+
     _pip_pkgs="$1"
     _py_exe="$2"
     _py_pkg=$(echo "$_py_exe" | sed -E "s/\\.//g")
     _pip_cmd="${_py_exe} -m pip"
 
     if [ "${_py_exe}" = "" ]; then
-        _py_exe='python'
+        _py_exe='python3'
     fi
 
     __check_pip_allowed
@@ -2762,8 +2655,10 @@ __install_pip_pkgs() {
         else
             __PACKAGES="${__PACKAGES} ${_py_pkg}-devel"
             if [ "$DISTRO_NAME_L" = "fedora" ];then
+              dnf makecache || return 1
               __dnf_install_noinput ${__PACKAGES} || return 1
             else
+              yum makecache || return 1
               __yum_install_noinput ${__PACKAGES} || return 1
             fi
         fi
@@ -2775,17 +2670,6 @@ __install_pip_pkgs() {
     ${_pip_cmd} install ${_pip_pkgs} || return 1
 }
 
-#---  FUNCTION  -------------------------------------------------------------------------------------------------------
-#          NAME:  __install_tornado_pip
-#    PARAMETERS:  python executable
-#   DESCRIPTION:  Return 0 or 1 if successfully able to install tornado<5.0
-#----------------------------------------------------------------------------------------------------------------------
-__install_tornado_pip() {
-    # OS needs tornado <5.0 from pip
-    __check_pip_allowed "You need to allow pip based installations (-P) for Tornado <5.0 in order to install Salt on Python 3"
-    ## install pip if its not installed and install tornado
-    __install_pip_pkgs "tornado<5.0" "${1}" || return 1
-}
 
 #---  FUNCTION  -------------------------------------------------------------------------------------------------------
 #          NAME:  __install_pip_deps
@@ -2793,6 +2677,7 @@ __install_tornado_pip() {
 #    PARAMETERS:  requirements_file
 #----------------------------------------------------------------------------------------------------------------------
 __install_pip_deps() {
+
     # Install virtualenv to system pip before activating virtualenv if thats going to be used
     # We assume pip pkg is installed since that is distro specific
     if [ "$_VIRTUALENV_DIR" != "null" ]; then
@@ -2823,19 +2708,20 @@ __install_pip_deps() {
 }   # ----------  end of function __install_pip_deps  ----------
 
 #---  FUNCTION  -------------------------------------------------------------------------------------------------------
-#          NAME:  __install_salt_from_repo_post_neon
+#          NAME:  __install_salt_from_repo
 #   DESCRIPTION:  Return 0 or 1 if successfully able to install. Can provide a different python version to
 #                 install pip packages with. If $py_exe is not specified it will use the default python version.
 #    PARAMETERS:  py_exe
 #----------------------------------------------------------------------------------------------------------------------
-__install_salt_from_repo_post_neon() {
+__install_salt_from_repo() {
+
     _py_exe="$1"
 
     if [ "${_py_exe}" = "" ]; then
-        _py_exe='python'
+        _py_exe="python3"
     fi
 
-    echodebug "__install_salt_from_repo_post_neon py_exe=$_py_exe"
+    echodebug "__install_salt_from_repo py_exe=$_py_exe"
 
     _py_version=$(${_py_exe} -c "import sys; print('{0}.{1}'.format(*sys.version_info))")
     _pip_cmd="pip${_py_version}"
@@ -2856,88 +2742,85 @@ __install_salt_from_repo_post_neon() {
 
     echodebug "Installed pip version: $(${_pip_cmd} --version)"
 
-    CHECK_PIP_VERSION_SCRIPT=$(cat << EOM
-import sys
-try:
-    import pip
-    installed_pip_version=tuple([int(part.strip()) for part in pip.__version__.split('.') if part.isdigit()])
-    desired_pip_version=($(echo ${_MINIMUM_PIP_VERSION} | sed 's/\./, /g' ))
-    if installed_pip_version < desired_pip_version:
-        print('Desired pip version {!r} > Installed pip version {!r}'.format('.'.join(map(str, desired_pip_version)), '.'.join(map(str, installed_pip_version))))
-        sys.exit(1)
-    print('Desired pip version {!r} < Installed pip version {!r}'.format('.'.join(map(str, desired_pip_version)), '.'.join(map(str, installed_pip_version))))
-    sys.exit(0)
-except ImportError:
-    print('Failed to import pip')
-    sys.exit(1)
-EOM
-)
-    if ! ${_py_exe} -c "$CHECK_PIP_VERSION_SCRIPT"; then
-        # Upgrade pip to at least 1.2 which is when we can start using "python -m pip"
-        if [ "${_py_version}" = "3.5" ]; then
-          echodebug "Running '${_pip_cmd} install ${_POST_NEON_PIP_INSTALL_ARGS} pip>=${_MINIMUM_PIP_VERSION},<21.0'"
-          ${_pip_cmd} install ${_POST_NEON_PIP_INSTALL_ARGS} -v "pip>=${_MINIMUM_PIP_VERSION},<21.0"
-        else
-          echodebug "Running '${_pip_cmd} install ${_POST_NEON_PIP_INSTALL_ARGS} pip>=${_MINIMUM_PIP_VERSION}'"
-          ${_pip_cmd} install ${_POST_NEON_PIP_INSTALL_ARGS} -v "pip>=${_MINIMUM_PIP_VERSION}"
-        fi
-        sleep 1
-        echodebug "PATH: ${PATH}"
-        _pip_cmd="pip${_py_version}"
-        if ! __check_command_exists "${_pip_cmd}"; then
-            echodebug "The pip binary '${_pip_cmd}' was not found in PATH"
-            _pip_cmd="pip$(echo "${_py_version}" | cut -c -1)"
-            if ! __check_command_exists "${_pip_cmd}"; then
-                echodebug "The pip binary '${_pip_cmd}' was not found in PATH"
-                _pip_cmd="pip"
-                if ! __check_command_exists "${_pip_cmd}"; then
-                    echoerror "Unable to find a pip binary"
-                    return 1
-                fi
-            fi
-        fi
-        echodebug "Installed pip version: $(${_pip_cmd} --version)"
+    _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION},<${_MAXIMUM_SETUPTOOLS_VERSION}"
+    if [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION}"
-    if [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-        # We also lock setuptools to <45 which is the latest release to support both py2 and py3
-        _setuptools_dep="${_setuptools_dep},<45"
+    _USE_BREAK_SYSTEM_PACKAGES=""
+    # shellcheck disable=SC2086,SC2090
+    if { [ "${DISTRO_NAME_L}" = "ubuntu" ] && [ "$DISTRO_MAJOR_VERSION" -ge 24 ]; } || \
+        { [ "${DISTRO_NAME_L}" = "debian" ] && [ "$DISTRO_MAJOR_VERSION" -ge 12 ]; }; then
+        _USE_BREAK_SYSTEM_PACKAGES="--break-system-packages"
+        echodebug "OS is Debian 12 / Ubuntu 24.04 or newer, using ${_USE_BREAK_SYSTEM_PACKAGES}"
     fi
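+    # Sketch of the failure this flag avoids (assumption, per PEP 668): on
+    # externally-managed distros a bare "pip install <pkg>" aborts with
+    # "error: externally-managed-environment"; --break-system-packages
+    # overrides that guard for this bootstrap.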
 
-    echodebug "Running '${_pip_cmd} install wheel ${_setuptools_dep}'"
-    ${_pip_cmd} install --upgrade ${_POST_NEON_PIP_INSTALL_ARGS} wheel "${_setuptools_dep}"
+    echodebug "Running '${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --upgrade ${_PIP_INSTALL_ARGS}  wheel ${_setuptools_dep}"
+    ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --upgrade ${_PIP_INSTALL_ARGS}  wheel "${_setuptools_dep}"
 
-    echoinfo "Installing salt using ${_py_exe}"
+    echoinfo "Installing salt using ${_py_exe}, $(${_py_exe} --version)"
     cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1
 
-    mkdir /tmp/git/deps
-    echoinfo "Downloading Salt Dependencies from PyPi"
-    echodebug "Running '${_pip_cmd} download -d /tmp/git/deps ${_PIP_DOWNLOAD_ARGS} .'"
-    ${_pip_cmd} download -d /tmp/git/deps ${_PIP_DOWNLOAD_ARGS} . || (echo "Failed to download salt dependencies" && return 1)
+    mkdir -p /tmp/git/deps
+    echodebug "Created directory /tmp/git/deps"
+
+    if [ ${DISTRO_NAME_L} = "ubuntu" ] && [ "$DISTRO_MAJOR_VERSION" -eq 22 ]; then
+        echodebug "Ubuntu 22.04 has problem with base.txt requirements file, not parsing sys_platform == 'win32', upgrading from default pip works"
+        echodebug "${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --upgrade  pip"
+        ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --upgrade  pip
+        # shellcheck disable=SC2181
+        if [ $? -ne 0 ]; then
+            echo "Failed to upgrade pip"
+            return 1
+        fi
+    fi
 
-    echoinfo "Installing Downloaded Salt Dependencies"
-    echodebug "Running '${_pip_cmd} install --ignore-installed ${_POST_NEON_PIP_INSTALL_ARGS} /tmp/git/deps/*'"
-    ${_pip_cmd} install --ignore-installed ${_POST_NEON_PIP_INSTALL_ARGS} /tmp/git/deps/* || return 1
     rm -f /tmp/git/deps/*
 
-    echoinfo "Building Salt Python Wheel"
+    echodebug "Installing Salt requirements from PyPi, ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed ${_PIP_INSTALL_ARGS} -r requirements/static/ci/py${_py_version}/linux.txt"
+    ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed ${_PIP_INSTALL_ARGS} -r "requirements/static/ci/py${_py_version}/linux.txt"
+    # shellcheck disable=SC2181
+    if [ $? -ne 0 ]; then
+        echo "Failed to install salt requirements for the version of Python ${_py_version}"
+        return 1
+    fi
 
+    if [ "${OS_NAME}" = "Linux" ]; then
+        ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed --upgrade ${_PIP_INSTALL_ARGS} "jaraco.functools==4.1.0" || return 1
+        ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed --upgrade ${_PIP_INSTALL_ARGS} "jaraco.text==4.0.0" || return 1
+        ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed --upgrade ${_PIP_INSTALL_ARGS} "jaraco.collections==5.1.0" || return 1
+        ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed --upgrade ${_PIP_INSTALL_ARGS} "jaraco.context==6.0.1" || return 1
+        ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --ignore-installed --upgrade ${_PIP_INSTALL_ARGS} "jaraco.classes==3.4.0" || return 1
+    fi
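+    # Note (assumption): these exact jaraco pins hold versions known to work
+    # with the pinned setuptools range installed earlier.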
+
+    echoinfo "Building Salt Python Wheel"
     if [ "$_ECHO_DEBUG" -eq $BS_TRUE ]; then
         SETUP_PY_INSTALL_ARGS="-v"
     fi
 
     echodebug "Running '${_py_exe} setup.py --salt-config-dir=$_SALT_ETC_DIR --salt-cache-dir=${_SALT_CACHE_DIR} ${SETUP_PY_INSTALL_ARGS} bdist_wheel'"
-    ${_py_exe} setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} bdist_wheel || return 1
+    ${_py_exe} setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} bdist_wheel || return 1
     mv dist/salt*.whl /tmp/git/deps/ || return 1
 
     cd "${__SALT_GIT_CHECKOUT_PARENT_DIR}" || return 1
 
     echoinfo "Installing Built Salt Wheel"
-    ${_pip_cmd} uninstall --yes salt 2>/dev/null || true
-    echodebug "Running '${_pip_cmd} install --no-deps --force-reinstall ${_POST_NEON_PIP_INSTALL_ARGS} /tmp/git/deps/salt*.whl'"
-    ${_pip_cmd} install --no-deps --force-reinstall \
-        ${_POST_NEON_PIP_INSTALL_ARGS} \
+    ${_pip_cmd} uninstall --yes ${_USE_BREAK_SYSTEM_PACKAGES} salt 2>/dev/null || true
+
+    # Hack for getting current Arch working with git-master
+    if [ "${DISTRO_NAME}"  = "Arch Linux" ]; then
+        _arch_dep="cryptography==42.0.7"    # debug matching current Arch version of python-cryptography
+        echodebug "Running '${_pip_cmd} install --force-reinstall --break-system-packages ${_arch_dep}'"
+        ${_pip_cmd} install --force-reinstall --break-system-packages "${_arch_dep}"
+    fi
+
+    echodebug "Running '${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --no-deps --force-reinstall ${_PIP_INSTALL_ARGS} /tmp/git/deps/salt*.whl'"
+
+    echodebug "Running ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --no-deps --force-reinstall ${_PIP_INSTALL_ARGS} --global-option=--salt-config-dir=$_SALT_ETC_DIR --salt-cache-dir=${_SALT_CACHE_DIR} ${SETUP_PY_INSTALL_ARGS} /tmp/git/deps/salt*.whl"
+
+    ${_pip_cmd} install ${_USE_BREAK_SYSTEM_PACKAGES} --no-deps --force-reinstall \
+        ${_PIP_INSTALL_ARGS} \
         --global-option="--salt-config-dir=$_SALT_ETC_DIR --salt-cache-dir=${_SALT_CACHE_DIR} ${SETUP_PY_INSTALL_ARGS}" \
         /tmp/git/deps/salt*.whl || return 1
 
@@ -2960,21 +2843,16 @@ EOM
         return 1
     fi
     return 0
-}   # ----------  end of function __install_salt_from_repo_post_neon  ----------
+}   # ----------  end of function __install_salt_from_repo  ----------
 
 
-if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-    if [ "x${_PY_MAJOR_VERSION}" = "x" ]; then
-        # Default to python 2 for pre Neon installs
-        _PY_MAJOR_VERSION=2
-    fi
-else
-    if [ "x${_PY_MAJOR_VERSION}" = "x" ]; then
-        # Default to python 3 for post Neon install
-        _PY_MAJOR_VERSION=3
-    fi
+# shellcheck disable=SC2268
+if [ "x${_PY_MAJOR_VERSION}" = "x" ]; then
+    # Default to python 3 for install
+    _PY_MAJOR_VERSION=3
 fi
 
+
 #######################################################################################################################
 #
 #   Distribution install functions
@@ -3058,6 +2936,9 @@ fi
 #   Ubuntu Install Functions
 #
 __enable_universe_repository() {
+
+    echodebug "__enable_universe_repository() entry"
+
     if [ "$(grep -R universe /etc/apt/sources.list /etc/apt/sources.list.d/ | grep -v '#')" != "" ]; then
         # The universe repository is already enabled
         return 0
@@ -3071,16 +2952,15 @@ __enable_universe_repository() {
 }
 
 __install_saltstack_ubuntu_repository() {
+
     # Workaround for latest non-LTS Ubuntu
+    echodebug "__install_saltstack_ubuntu_repository() entry"
+
     if { [ "$DISTRO_MAJOR_VERSION" -eq 20 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
-        # remove 22 version when salt packages for 22.04 are available
-        [ "$DISTRO_MAJOR_VERSION" -eq 21 ] ||  [ "$DISTRO_MAJOR_VERSION" -eq 22 ]; then
+       { [ "$DISTRO_MAJOR_VERSION" -eq 22 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
+       { [ "$DISTRO_MAJOR_VERSION" -eq 24 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
+        [ "$DISTRO_MAJOR_VERSION" -eq 21 ] ||  [ "$DISTRO_MAJOR_VERSION" -eq 23 ] || [ "$DISTRO_MAJOR_VERSION" -eq 25 ]; then
         echowarn "Non-LTS Ubuntu detected, but stable packages requested. Trying packages for previous LTS release. You may experience problems."
-        UBUNTU_VERSION=20.04
-        UBUNTU_CODENAME="focal"
-    else
-        UBUNTU_VERSION=${DISTRO_VERSION}
-        UBUNTU_CODENAME=${DISTRO_CODENAME}
     fi
 
     # Install downloader backend for GPG keys fetching
@@ -3096,33 +2976,48 @@ __install_saltstack_ubuntu_repository() {
         __PACKAGES="${__PACKAGES} apt-transport-https ca-certificates"
     fi
 
+    ## include hwclock if not part of base OS (23.10 and up)
+    if [ ! -f /usr/sbin/hwclock ]; then
+        __PACKAGES="${__PACKAGES} util-linux-extra"
+    fi
+
     # shellcheck disable=SC2086,SC2090
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
-    __PY_VERSION_REPO="apt"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is not supported, only Python 3"
+        return 1
     fi
 
     # SaltStack's stable Ubuntu repository:
-    SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${STABLE_REV}"
-    echo "$__REPO_ARCH_DEB $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/salt.list
-
-    __apt_key_fetch "$SALTSTACK_UBUNTU_URL/salt-archive-keyring.gpg" || return 1
-
+    __fetch_url "/etc/apt/sources.list.d/salt.sources" "https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.sources"
+    __apt_key_fetch "${HTTP_VAL}://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" || return 1
     __wait_for_apt apt-get update || return 1
+
+    if [ "$STABLE_REV" != "latest" ]; then
+        # latest is default
+        if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $STABLE_REV.*" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $STABLE_REV" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        fi
+    fi
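+    # Illustrative result of the pinning above: STABLE_REV=3006 produces
+    # /etc/apt/preferences.d/salt-pin-1001 containing:
+    #   Package: salt-*
+    #   Pin: version 3006.*
+    #   Pin-Priority: 1001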
+
 }
 
 __install_saltstack_ubuntu_onedir_repository() {
+
+    echodebug "__install_saltstack_ubuntu_onedir_repository() entry"
+
     # Workaround for latest non-LTS Ubuntu
     if { [ "$DISTRO_MAJOR_VERSION" -eq 20 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
-        [ "$DISTRO_MAJOR_VERSION" -eq 21 ]; then
+       { [ "$DISTRO_MAJOR_VERSION" -eq 22 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]; } || \
+        [ "$DISTRO_MAJOR_VERSION" -eq 21 ] ||  [ "$DISTRO_MAJOR_VERSION" -eq 23 ] || [ "$DISTRO_MAJOR_VERSION" -eq 25 ]; then
         echowarn "Non-LTS Ubuntu detected, but stable packages requested. Trying packages for previous LTS release. You may experience problems."
-        UBUNTU_VERSION=20.04
-        UBUNTU_CODENAME="focal"
-    else
-        UBUNTU_VERSION=${DISTRO_VERSION}
-        UBUNTU_CODENAME=${DISTRO_CODENAME}
     fi
 
     # Install downloader backend for GPG keys fetching
@@ -3138,35 +3033,38 @@ __install_saltstack_ubuntu_onedir_repository() {
         __PACKAGES="${__PACKAGES} apt-transport-https ca-certificates"
     fi
 
+    ## include hwclock if not part of base OS (23.10 and up)
+    if [ "$DISTRO_MAJOR_VERSION" -ge 23 ] && [ ! -f /usr/sbin/hwclock ]; then
+        __PACKAGES="${__PACKAGES} util-linux-extra"
+    fi
+
     # shellcheck disable=SC2086,SC2090
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
-    __PY_VERSION_REPO="apt"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
-    fi
-
     # SaltStack's stable Ubuntu repository:
-    SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${ONEDIR_REV}/"
-    if [ "${ONEDIR_REV}" = "nightly" ] ; then
-        SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/"
-    fi
-    echo "$__REPO_ARCH_DEB $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/salt.list
-
-    if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then
-      __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || return 1
-    elif [ "$(echo "${ONEDIR_REV}" | grep -E '(latest|nightly)')" != "" ]; then
-      __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || \
-      __apt_key_fetch "${SALTSTACK_UBUNTU_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1
-    else
-      __apt_key_fetch "${SALTSTACK_UBUNTU_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1
-    fi
-
+    __fetch_url "/etc/apt/sources.list.d/salt.sources" "https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.sources"
+    __apt_key_fetch "${HTTP_VAL}://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" || return 1
     __wait_for_apt apt-get update || return 1
+
+    if [ "$ONEDIR_REV" != "latest" ]; then
+        # latest is default
+        if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $ONEDIR_REV.*" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+            ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $ONEDIR_REV_DOT" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        fi
+    fi
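+    # Illustrative: an ONEDIR_REV of 3006-8 is rewritten to 3006.8 by the sed
+    # above, so the preferences file pins exactly that point release.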
 }
 
 install_ubuntu_deps() {
-    if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
+
+    echodebug "install_ubuntu_deps() entry"
+    if [ "$_DISABLE_REPOS" -eq $BS_FALSE ]; then
         # Install add-apt-repository
         if ! __check_command_exists add-apt-repository; then
             __apt_get_install_noinput software-properties-common || return 1
@@ -3179,24 +3077,19 @@ install_ubuntu_deps() {
 
     __PACKAGES=''
 
-    if [ "$DISTRO_MAJOR_VERSION" -lt 16 ]; then
-        # Minimal systems might not have upstart installed, install it
-        __PACKAGES="upstart"
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=""
-    fi
-
-    if [ "$DISTRO_MAJOR_VERSION" -ge 16 ] && [ -z "$_PY_EXE" ]; then
-        __PACKAGES="${__PACKAGES} python2.7"
+    if [ "$DISTRO_MAJOR_VERSION" -ge 20 ] && [ -z "$_PY_EXE" ]; then
+        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}"
     fi
 
     if [ "$_VIRTUALENV_DIR" != "null" ]; then
-        __PACKAGES="${__PACKAGES} python-virtualenv"
+        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-virtualenv"
     fi
+
     # Need python-apt for managing packages via Salt
     __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-apt"
 
@@ -3209,6 +3102,14 @@ install_ubuntu_deps() {
     # Additionally install procps and pciutils which allows for Docker bootstraps. See 366#issuecomment-39666813
     __PACKAGES="${__PACKAGES} procps pciutils"
 
+    # ensure sudo is installed (procps, which provides ps, is added above)
+    __PACKAGES="${__PACKAGES} sudo"
+
+    ## include hwclock if not part of base OS (23.10 and up)
+    if [ ! -f /usr/sbin/hwclock ]; then
+        __PACKAGES="${__PACKAGES} util-linux-extra"
+    fi
+
     # shellcheck disable=SC2086,SC2090
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
@@ -3222,14 +3123,10 @@ install_ubuntu_deps() {
 }
 
 install_ubuntu_stable_deps() {
-    if [ "${_SLEEP}" -eq "${__DEFAULT_SLEEP}" ] && [ "$DISTRO_MAJOR_VERSION" -lt 16 ]; then
-        # The user did not pass a custom sleep value as an argument, let's increase the default value
-        echodebug "On Ubuntu systems we increase the default sleep value to 10."
-        echodebug "See https://github.com/saltstack/salt/issues/12248 for more info."
-        _SLEEP=10
-    fi
 
-    if [ $_START_DAEMONS -eq $BS_FALSE ]; then
+    echodebug "install_ubuntu_stable_deps() entry"
+
+    if [ "$_START_DAEMONS" -eq $BS_FALSE ]; then
         echowarn "Not starting daemons on Debian based distributions is not working mostly because starting them is the default behaviour."
     fi
 
@@ -3240,7 +3137,8 @@ install_ubuntu_stable_deps() {
 
     if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
         if [ "${_INSECURE_DL}" -eq $BS_TRUE ]; then
-            if [ "$DISTRO_MAJOR_VERSION" -ge 20 ] || [ "$DISTRO_MAJOR_VERSION" -ge 21 ] || [ "$DISTRO_MAJOR_VERSION" -ge 22 ]; then
+            ## apt-key is deprecated
+            if [ "$DISTRO_MAJOR_VERSION" -ge 20 ]; then
                 __apt_get_install_noinput --allow-unauthenticated debian-archive-keyring && apt-get update || return 1
             else
                 __apt_get_install_noinput --allow-unauthenticated debian-archive-keyring &&
@@ -3260,6 +3158,9 @@ install_ubuntu_stable_deps() {
 }
 
 install_ubuntu_git_deps() {
+
+    echodebug "install_ubuntu_git_deps() entry"
+
     __wait_for_apt apt-get update || return 1
 
     if ! __check_command_exists git; then
@@ -3270,67 +3171,33 @@ install_ubuntu_git_deps() {
         __apt_get_install_noinput ca-certificates
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=""
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-
-        __PACKAGES=""
-
-        # See how we are installing packages
-        if [ "${_PIP_ALL}" -eq $BS_TRUE ]; then
-            __PACKAGES="${__PACKAGES} python-dev swig libssl-dev libzmq3 libzmq3-dev"
-
-            if ! __check_command_exists pip; then
-                __PACKAGES="${__PACKAGES} python-setuptools python-pip"
-            fi
-
-            # Get just the apt packages that are required to build all the pythons
-            # shellcheck disable=SC2086
-            __apt_get_install_noinput ${__PACKAGES} || return 1
-            # Install the pythons from requirements (only zmq for now)
-            __install_pip_deps "${_SALT_GIT_CHECKOUT_DIR}/requirements/zeromq.txt" || return 1
-        else
-            install_ubuntu_stable_deps || return 1
-
-            if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-                __PACKAGES="${__PACKAGES} python3-setuptools"
-            else
-                # There is no m2crypto package for Py3 at this time - only install for Py2
-                __PACKAGES="${__PACKAGES} python-m2crypto"
-            fi
-
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2"
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests"
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado python${PY_PKG_VER}-yaml"
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-zmq"
-            __PACKAGES="${__PACKAGES} python-concurrent.futures"
-
-            if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-                # Install python-libcloud if asked to
-                __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud"
-            fi
-
-            # shellcheck disable=SC2086
-            __apt_get_install_noinput ${__PACKAGES} || return 1
-        fi
-    else
-        __PACKAGES="python${PY_PKG_VER}-dev python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
-        if [ "$DISTRO_MAJOR_VERSION" -ge 22 ]; then
-            __PACKAGES="${__PACKAGES} g++"
-        fi
-        # shellcheck disable=SC2086
-        __apt_get_install_noinput ${__PACKAGES} || return 1
+    __PACKAGES="python${PY_PKG_VER}-dev python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
+    if [ "$DISTRO_MAJOR_VERSION" -ge 22 ]; then
+        __PACKAGES="${__PACKAGES} g++"
     fi
 
+    ## include hwclock if not part of base OS (23.10 and up)
+    if [ ! -f /usr/sbin/hwclock ]; then
+        __PACKAGES="${__PACKAGES} util-linux-extra"
+    fi
+
+    # Additionally install procps, pciutils and sudo, which allow for Docker bootstraps. See 366#issuecomment-39666813
+    __PACKAGES="${__PACKAGES} procps pciutils sudo"
+
+    # shellcheck disable=SC2086
+    __apt_get_install_noinput ${__PACKAGES} || return 1
+
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -3338,14 +3205,8 @@ install_ubuntu_git_deps() {
 }
 
 install_ubuntu_onedir_deps() {
-    if [ "${_SLEEP}" -eq "${__DEFAULT_SLEEP}" ] && [ "$DISTRO_MAJOR_VERSION" -lt 16 ]; then
-        # The user did not pass a custom sleep value as an argument, let's increase the default value
-        echodebug "On Ubuntu systems we increase the default sleep value to 10."
-        echodebug "See https://github.com/saltstack/salt/issues/12248 for more info."
-        _SLEEP=10
-    fi
 
-    if [ $_START_DAEMONS -eq $BS_FALSE ]; then
+    if [ "$_START_DAEMONS" -eq $BS_FALSE ]; then
         echowarn "Not starting daemons on Debian based distributions is not working mostly because starting them is the default behaviour."
     fi
 
@@ -3356,7 +3217,8 @@ install_ubuntu_onedir_deps() {
 
     if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
         if [ "${_INSECURE_DL}" -eq $BS_TRUE ]; then
-            if [ "$DISTRO_MAJOR_VERSION" -ge 20 ] || [ "$DISTRO_MAJOR_VERSION" -ge 21 ]; then
+            ## apt-key is deprecated
+            if [ "$DISTRO_MAJOR_VERSION" -ge 20 ]; then
                 __apt_get_install_noinput --allow-unauthenticated debian-archive-keyring && apt-get update || return 1
             else
                 __apt_get_install_noinput --allow-unauthenticated debian-archive-keyring &&
@@ -3376,6 +3238,9 @@ install_ubuntu_onedir_deps() {
 }
 
 install_ubuntu_stable() {
+
+    __wait_for_apt apt-get update || return 1
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
@@ -3391,6 +3256,10 @@ install_ubuntu_stable() {
         __PACKAGES="${__PACKAGES} salt-syndic"
     fi
 
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api"
+    fi
+
     # shellcheck disable=SC2086
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
@@ -3398,6 +3267,7 @@ install_ubuntu_stable() {
 }
 
 install_ubuntu_git() {
+
     # Activate virtualenv before install
     if [ "${_VIRTUALENV_DIR}" != "null" ]; then
         __activate_virtualenv || return 1
@@ -3406,43 +3276,30 @@ install_ubuntu_git() {
     if [ -n "$_PY_EXE" ]; then
         _PYEXE=${_PY_EXE}
     else
-        _PYEXE=python2.7
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        # We can use --prefix on debian based ditributions
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            _POST_NEON_PIP_INSTALL_ARGS="--target=/usr/lib/python3/dist-packages --install-option=--install-scripts=/usr/bin"
-        else
-            _POST_NEON_PIP_INSTALL_ARGS="--target=/usr/lib/python2.7/dist-packages --install-option=--install-scripts=/usr/bin"
-        fi
-        _POST_NEON_PIP_INSTALL_ARGS=""
-        __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1
+    _PIP_INSTALL_ARGS=""
+    __install_salt_from_repo "${_PY_EXE}" || return 1
+    cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1
 
-        # Account for new path for services files in later releases
-        if [ -d "pkg/common" ]; then
-          _SERVICE_DIR="pkg/common"
-        else
-          _SERVICE_DIR="pkg"
-        fi
-
-        sed -i 's:/usr/bin:/usr/local/bin:g' ${_SERVICE_DIR}/*.service
-        return 0
-    fi
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        # shellcheck disable=SC2086
-        "${_PYEXE}" setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --install-layout=deb || return 1
+    # Account for new path for services files in later releases
+    if [ -d "pkg/common" ]; then
+      _SERVICE_DIR="pkg/common"
     else
-        # shellcheck disable=SC2086
-        "${_PYEXE}" setup.py ${SETUP_PY_INSTALL_ARGS} install --install-layout=deb || return 1
+      _SERVICE_DIR="pkg"
     fi
 
+    sed -i 's:/usr/bin:/usr/local/bin:g' "${_SERVICE_DIR}"/*.service
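+    # The sed above illustrates the path fixup: e.g. it rewrites
+    # ExecStart=/usr/bin/salt-minion to ExecStart=/usr/local/bin/salt-minion,
+    # matching where pip places the console scripts (assumption).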
     return 0
+
 }
 
 install_ubuntu_onedir() {
+
+    __wait_for_apt apt-get update || return 1
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
@@ -3458,6 +3315,10 @@ install_ubuntu_onedir() {
         __PACKAGES="${__PACKAGES} salt-syndic"
     fi
 
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api"
+    fi
+
     # shellcheck disable=SC2086
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
@@ -3465,6 +3326,7 @@ install_ubuntu_onedir() {
 }
 
 install_ubuntu_stable_post() {
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -3474,7 +3336,7 @@ install_ubuntu_stable_post() {
         [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             # Using systemd
             /bin/systemctl is-enabled salt-$fname.service > /dev/null 2>&1 || (
                 /bin/systemctl preset salt-$fname.service > /dev/null 2>&1 &&
@@ -3491,6 +3353,7 @@ install_ubuntu_stable_post() {
 }
 
 install_ubuntu_git_post() {
+
     for fname in api master minion syndic; do
         # Skip if not meant to be installed
         [ $fname = "api" ] && \
@@ -3506,7 +3369,7 @@ install_ubuntu_git_post() {
           _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg"
         fi
 
-        if [ -f /bin/systemctl ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
             __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service"
 
             # Skip salt-api since the service should be opt-in and not necessarily started on boot
@@ -3515,21 +3378,6 @@ install_ubuntu_git_post() {
             systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
             sleep 1
             systemctl daemon-reload
-        elif [ -f /sbin/initctl ]; then
-            _upstart_conf="/etc/init/salt-$fname.conf"
-            # We have upstart support
-            echodebug "There's upstart support"
-            if [ ! -f $_upstart_conf ]; then
-                # upstart does not know about our service, let's copy the proper file
-                echowarn "Upstart does not appear to know about salt-$fname"
-                echodebug "Copying ${_SERVICE_DIR}/salt-$fname.upstart to $_upstart_conf"
-                __copyfile "${_SERVICE_DIR}/salt-${fname}.upstart" "$_upstart_conf"
-                # Set service to know about virtualenv
-                if [ "${_VIRTUALENV_DIR}" != "null" ]; then
-                    echo "SALT_USE_VIRTUALENV=${_VIRTUALENV_DIR}" > /etc/default/salt-${fname}
-                fi
-                /sbin/initctl reload-configuration || return 1
-            fi
         # No upstart support in Ubuntu!?
         elif [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.init" ]; then
             echodebug "There's NO upstart support!?"
@@ -3542,7 +3390,7 @@ install_ubuntu_git_post() {
 
             update-rc.d salt-$fname defaults
         else
-            echoerror "Neither upstart nor init.d was setup for salt-$fname"
+            echoerror "No init.d was setup for salt-$fname"
         fi
     done
 
@@ -3550,13 +3398,12 @@ install_ubuntu_git_post() {
 }
 
 install_ubuntu_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
 
-    # Ensure upstart configs / systemd units are loaded
-    if [ -f /bin/systemctl ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
+
+    # Ensure systemd units are loaded
+    if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
         systemctl daemon-reload
-    elif [ -f /sbin/initctl ]; then
-        /sbin/initctl reload-configuration
     fi
 
     for fname in api master minion syndic; do
@@ -3568,7 +3415,7 @@ install_ubuntu_restart_daemons() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
             echodebug "There's systemd support while checking salt-$fname"
             systemctl stop salt-$fname > /dev/null 2>&1
             systemctl start salt-$fname.service && continue
@@ -3580,18 +3427,6 @@ install_ubuntu_restart_daemons() {
             fi
         fi
 
-        if [ -f /sbin/initctl ]; then
-            echodebug "There's upstart support while checking salt-$fname"
-
-            if status salt-$fname 2>/dev/null | grep -q running; then
-                stop salt-$fname || (echodebug "Failed to stop salt-$fname" && return 1)
-            fi
-
-            start salt-$fname && continue
-            # We failed to start the service, let's test the SysV code below
-            echodebug "Failed to start salt-$fname using Upstart"
-        fi
-
         if [ ! -f /etc/init.d/salt-$fname ]; then
             echoerror "No init.d support for salt-$fname was found"
             return 1
@@ -3605,6 +3440,7 @@ install_ubuntu_restart_daemons() {
 }
 
 install_ubuntu_check_services() {
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -3614,10 +3450,8 @@ install_ubuntu_check_services() {
         [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then
             __check_services_systemd salt-$fname || return 1
-        elif [ -f /sbin/initctl ] && [ -f /etc/init/salt-${fname}.conf ]; then
-            __check_services_upstart salt-$fname || return 1
         elif [ -f /etc/init.d/salt-$fname ]; then
             __check_services_debian salt-$fname || return 1
         fi
@@ -3635,12 +3469,12 @@ install_ubuntu_check_services() {
 #   Debian Install Functions
 #
 __install_saltstack_debian_repository() {
-    DEBIAN_RELEASE="$DISTRO_MAJOR_VERSION"
-    DEBIAN_CODENAME="$DISTRO_CODENAME"
 
-    __PY_VERSION_REPO="apt"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
+    echodebug "__install_saltstack_debian_repository() entry"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
     # Install downloader backend for GPG keys fetching
@@ -3659,22 +3493,33 @@ __install_saltstack_debian_repository() {
     # shellcheck disable=SC2086,SC2090
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
-    # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location
-    SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/${STABLE_REV}"
-    echo "$__REPO_ARCH_DEB $SALTSTACK_DEBIAN_URL $DEBIAN_CODENAME main" > "/etc/apt/sources.list.d/salt.list"
-
-    __apt_key_fetch "$SALTSTACK_DEBIAN_URL/salt-archive-keyring.gpg" || return 1
-
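+    # salt.sources is a deb822-format APT source shipped with the latest
+    # salt-install-guide release; it replaces the old per-release salt.list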
+    __fetch_url "/etc/apt/sources.list.d/salt.sources" "https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.sources"
+    __apt_key_fetch "${HTTP_VAL}://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" || return 1
     __wait_for_apt apt-get update || return 1
+
+    if [ "$STABLE_REV" != "latest" ]; then
+        # latest is default
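+        # Version selection is done via an apt preferences file: a priority
+        # above 1000 makes apt install the pinned version even if it would be
+        # a downgrade. For example, STABLE_REV=3006 produces:
+        #   Package: salt-*
+        #   Pin: version 3006.*
+        #   Pin-Priority: 1001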
+        if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $STABLE_REV.*" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+            STABLE_REV_DOT=$(echo "$STABLE_REV" | sed 's/-/\./')
+            MINOR_VER_STRG="-$STABLE_REV_DOT"
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $STABLE_REV_DOT" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        fi
+    fi
 }
 
 __install_saltstack_debian_onedir_repository() {
-    DEBIAN_RELEASE="$DISTRO_MAJOR_VERSION"
-    DEBIAN_CODENAME="$DISTRO_CODENAME"
 
-    __PY_VERSION_REPO="apt"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
+    echodebug "__install_saltstack_debian_onedir_repository() entry"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
     # Install downloader backend for GPG keys fetching
@@ -3693,80 +3538,30 @@ __install_saltstack_debian_onedir_repository() {
     # shellcheck disable=SC2086,SC2090
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
-    # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location
-    SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/${ONEDIR_REV}/"
-    if [ "${ONEDIR_REV}" = "nightly" ] ; then
-        SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/"
-    fi
-    echo "$__REPO_ARCH_DEB $SALTSTACK_DEBIAN_URL $DEBIAN_CODENAME main" > "/etc/apt/sources.list.d/salt.list"
-
-    if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then
-      __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || return 1
-    elif [ "$(echo "${ONEDIR_REV}" | grep -E '(latest|nightly)')" != "" ]; then
-      __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || \
-      __apt_key_fetch "${SALTSTACK_DEBIAN_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1
-    else
-      __apt_key_fetch "${SALTSTACK_DEBIAN_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1
-    fi
-
-    __wait_for_apt apt-get update || return 1
-}
-
-install_debian_deps() {
-    if [ $_START_DAEMONS -eq $BS_FALSE ]; then
-        echowarn "Not starting daemons on Debian based distributions is not working mostly because starting them is the default behaviour."
-    fi
-
-    # No user interaction, libc6 restart services for example
-    export DEBIAN_FRONTEND=noninteractive
-
+    __fetch_url "/etc/apt/sources.list.d/salt.sources" "https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.sources"
+    __apt_key_fetch "${HTTP_VAL}://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" || return 1
     __wait_for_apt apt-get update || return 1
 
-    if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
-        # Try to update GPG keys first if allowed
-        if [ "${_INSECURE_DL}" -eq $BS_TRUE ]; then
-            if [ "$DISTRO_MAJOR_VERSION" -ge 10 ]; then
-                __apt_get_install_noinput --allow-unauthenticated debian-archive-keyring && apt-get update || return 1
-            else
-                __apt_get_install_noinput --allow-unauthenticated debian-archive-keyring &&
-                    apt-key update && apt-get update || return 1
-            fi
+    if [ "$ONEDIR_REV" != "latest" ]; then
+        # latest is default
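+        # Same apt-pinning scheme as the stable install path: priority 1001
+        # beats the default of 500 and permits downgrades when required.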
+        if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $ONEDIR_REV.*" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
+        elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+            ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+            echo "Package: salt-*" > /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin: version $ONEDIR_REV_DOT" >> /etc/apt/preferences.d/salt-pin-1001
+            echo "Pin-Priority: 1001" >> /etc/apt/preferences.d/salt-pin-1001
         fi
-
-        __apt_get_upgrade_noinput || return 1
     fi
-
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=""
-    fi
-
-    # Additionally install procps and pciutils which allows for Docker bootstraps. See 366#issuecomment-39666813
-    __PACKAGES='procps pciutils'
-
-    # YAML module is used for generating custom master/minion configs
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-yaml"
-
-    # shellcheck disable=SC2086
-    __apt_get_install_noinput ${__PACKAGES} || return 1
-
-    if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
-        __check_dpkg_architecture || return 1
-        __install_saltstack_debian_repository || return 1
-    fi
-
-    if [ "${_EXTRA_PACKAGES}" != "" ]; then
-        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
-        # shellcheck disable=SC2086
-        __apt_get_install_noinput ${_EXTRA_PACKAGES} || return 1
-    fi
-
-    return 0
 }
 
 install_debian_onedir_deps() {
-    if [ $_START_DAEMONS -eq $BS_FALSE ]; then
+
+    echodebug "install_debian_onedir_git_deps() entry"
+
+    if [ "$_START_DAEMONS" -eq $BS_FALSE ]; then
         echowarn "Not starting daemons on Debian based distributions is not working mostly because starting them is the default behaviour."
     fi
 
@@ -3789,14 +3584,13 @@ install_debian_onedir_deps() {
         __apt_get_upgrade_noinput || return 1
     fi
 
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=""
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    # Additionally install procps and pciutils which allows for Docker bootstraps. See 366#issuecomment-39666813
-    __PACKAGES='procps pciutils'
+    # Additionally install procps, pciutils and sudo, which allow for Docker bootstraps. See 366#issuecomment-39666813
+    __PACKAGES='procps pciutils sudo'
 
     # YAML module is used for generating custom master/minion configs
     __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-yaml"
@@ -3818,201 +3612,50 @@ install_debian_onedir_deps() {
     return 0
 }
 
-install_debian_git_pre() {
-    if ! __check_command_exists git; then
-        __apt_get_install_noinput git || return 1
-    fi
-
-    if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
-        __apt_get_install_noinput ca-certificates
-    fi
-
-    __git_clone_and_checkout || return 1
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
-}
-
 install_debian_git_deps() {
-    install_debian_deps || return 1
-    install_debian_git_pre || return 1
 
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=""
-    fi
+    echodebug "install_debian_git_deps() entry"
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        __PACKAGES="libzmq3 libzmq3-dev lsb-release python-apt python-backports.ssl-match-hostname"
-        __PACKAGES="${__PACKAGES} python-crypto python-jinja2 python-msgpack python-m2crypto"
-        __PACKAGES="${__PACKAGES} python-requests python-tornado python-yaml python-zmq"
-
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            # Install python-libcloud if asked to
-            __PACKAGES="${__PACKAGES} python-libcloud"
-        fi
-
-        # shellcheck disable=SC2086
-        __apt_get_install_noinput ${__PACKAGES} || return 1
-    else
-        __PACKAGES="python${PY_PKG_VER}-dev python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
-        echodebug "install_debian_git_deps() Installing ${__PACKAGES}"
-        # shellcheck disable=SC2086
-        __apt_get_install_noinput ${__PACKAGES} || return 1
-    fi
-
-    return 0
-}
-
-install_debian_7_git_deps() {
-    install_debian_deps || return 1
-    install_debian_git_deps || return 1
-
-    return 0
-}
-
-install_debian_8_git_deps() {
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        echodebug "CALLING install_debian_git_deps"
-        install_debian_git_deps || return 1
-        return 0
-    fi
-
-    install_debian_deps || return 1
+    __wait_for_apt apt-get update || return 1
 
     if ! __check_command_exists git; then
-        __apt_get_install_noinput git || return 1
+        __apt_get_install_noinput git-core || return 1
     fi
 
     if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
         __apt_get_install_noinput ca-certificates
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    __PACKAGES="libzmq3 libzmq3-dev lsb-release python-apt python-crypto python-jinja2"
-    __PACKAGES="${__PACKAGES} python-m2crypto python-msgpack python-requests python-systemd"
-    __PACKAGES="${__PACKAGES} python-yaml python-zmq python-concurrent.futures"
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-        # Install python-libcloud if asked to
-        __PACKAGES="${__PACKAGES} python-libcloud"
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    __PIP_PACKAGES=''
-    if (__check_pip_allowed >/dev/null 2>&1); then
-        __PIP_PACKAGES='tornado<5.0'
-        # Install development environment for building tornado Python module
-        __PACKAGES="${__PACKAGES} build-essential python-dev"
+    __PACKAGES="python${PY_PKG_VER}-dev python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
+    echodebug "install_debian_git_deps() Installing ${__PACKAGES}"
 
-        if ! __check_command_exists pip; then
-            __PACKAGES="${__PACKAGES} python-pip"
-        fi
-    # Attempt to configure backports repo on non-x86_64 system
-    elif [ $_DISABLE_REPOS -eq $BS_FALSE ] && [ "$DPKG_ARCHITECTURE" != "amd64" ]; then
-        # Check if Debian Backports repo already configured
-        if ! apt-cache policy | grep -q 'Debian Backports'; then
-            echo 'deb http://httpredir.debian.org/debian jessie-backports main' > \
-                /etc/apt/sources.list.d/backports.list
-        fi
-
-        __wait_for_apt apt-get update || return 1
-
-        # python-tornado package should be installed from backports repo
-        __PACKAGES="${__PACKAGES} python-backports.ssl-match-hostname python-tornado/jessie-backports"
-    else
-        __PACKAGES="${__PACKAGES} python-backports.ssl-match-hostname python-tornado"
-    fi
+    # Additionally install procps, pciutils and sudo, which allow for Docker bootstraps. See 366#issuecomment-39666813
+    __PACKAGES="${__PACKAGES} procps pciutils sudo"
 
     # shellcheck disable=SC2086
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
-    if [ "${__PIP_PACKAGES}" != "" ]; then
-        # shellcheck disable=SC2086,SC2090
-        pip install -U ${__PIP_PACKAGES} || return 1
-    fi
-
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
     return 0
 }
 
-install_debian_9_git_deps() {
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        install_debian_git_deps || return 1
-        return 0
-    fi
-
-    install_debian_deps || return 1
-    install_debian_git_pre || return 1
-
-    __PACKAGES="libzmq5 lsb-release"
-
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=""
-
-        # These packages are PY2-ONLY
-        __PACKAGES="${__PACKAGES} python-backports-abc python-m2crypto python-concurrent.futures"
-    fi
-
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-apt python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2"
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests python${PY_PKG_VER}-systemd"
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado python${PY_PKG_VER}-yaml python${PY_PKG_VER}-zmq"
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-        # Install python-libcloud if asked to
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud"
-    fi
-
-    # shellcheck disable=SC2086
-    __apt_get_install_noinput ${__PACKAGES} || return 1
-
-    return 0
-}
-
-install_debian_10_git_deps() {
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        install_debian_git_deps || return 1
-        return 0
-    fi
-
-    install_debian_deps || return 1
-    install_debian_git_pre || return 1
-
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        _py=${_PY_EXE}
-        PY_PKG_VER=3
-        __PACKAGES="python${PY_PKG_VER}-distutils"
-    else
-        _py="python"
-        PY_PKG_VER=""
-        __PACKAGES=""
-    fi
-
-    __install_tornado_pip ${_py}|| return 1
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-jinja2"
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado python${PY_PKG_VER}-yaml python${PY_PKG_VER}-zmq"
-
-    # shellcheck disable=SC2086
-    __apt_get_install_noinput ${__PACKAGES} || return 1
-
-    return 0
-}
-
 install_debian_stable() {
+
+    __wait_for_apt apt-get update || return 1
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
@@ -4028,81 +3671,72 @@ install_debian_stable() {
         __PACKAGES="${__PACKAGES} salt-syndic"
     fi
 
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api"
+    fi
+
     # shellcheck disable=SC2086
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
     return 0
 }
 
-install_debian_7_stable() {
-    install_debian_stable || return 1
+install_debian_11_git_deps() {
+
+    install_debian_git_deps || return 1
     return 0
 }
 
-install_debian_8_stable() {
-    install_debian_stable || return 1
-    return 0
-}
+install_debian_12_git_deps() {
 
-install_debian_9_stable() {
-    install_debian_stable || return 1
+    install_debian_git_deps || return 1
     return 0
 }
 
 install_debian_git() {
+
     if [ -n "$_PY_EXE" ]; then
         _PYEXE=${_PY_EXE}
     else
-        _PYEXE=python
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        # We can use --prefix on debian based ditributions
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            _POST_NEON_PIP_INSTALL_ARGS="--target=/usr/lib/python3/dist-packages --install-option=--install-scripts=/usr/bin"
-        else
-            _POST_NEON_PIP_INSTALL_ARGS="--target=/usr/lib/python2.7/dist-packages --install-option=--install-scripts=/usr/bin"
-        fi
-        _POST_NEON_PIP_INSTALL_ARGS=""
-        __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1
+    # We can use --prefix on Debian-based distributions
 
-        # Account for new path for services files in later releases
-        if [ -d "pkg/common" ]; then
-          _SERVICE_DIR="pkg/common"
-        else
-          _SERVICE_DIR="pkg"
-        fi
+    _PIP_INSTALL_ARGS=""
 
-        sed -i 's:/usr/bin:/usr/local/bin:g' ${_SERVICE_DIR}/*.service
-        return 0
-    fi
+    __install_salt_from_repo "${_PY_EXE}" || return 1
+    cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1
 
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        # shellcheck disable=SC2086
-        "${_PYEXE}" setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --install-layout=deb || return 1
+    # Account for new path for services files in later releases
+    if [ -d "pkg/common" ]; then
+      _SERVICE_DIR="pkg/common"
     else
-        # shellcheck disable=SC2086
-        "${_PYEXE}" setup.py ${SETUP_PY_INSTALL_ARGS} install --install-layout=deb || return 1
+      _SERVICE_DIR="pkg"
     fi
+
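+    # pip installs console scripts under /usr/local/bin on Debian, so point
+    # the packaged unit files there instead of /usr/bin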
+    sed -i 's:/usr/bin:/usr/local/bin:g' "${_SERVICE_DIR}"/*.service
+    return 0
 }
 
-install_debian_7_git() {
+install_debian_11_git() {
+
     install_debian_git || return 1
     return 0
 }
 
-install_debian_8_git() {
-    install_debian_git || return 1
-    return 0
-}
+install_debian_12_git() {
 
-install_debian_9_git() {
     install_debian_git || return 1
     return 0
 }
 
 install_debian_onedir() {
+
+    __wait_for_apt apt-get update || return 1
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
@@ -4118,6 +3752,10 @@ install_debian_onedir() {
         __PACKAGES="${__PACKAGES} salt-syndic"
     fi
 
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api"
+    fi
+
     # shellcheck disable=SC2086
     __apt_get_install_noinput ${__PACKAGES} || return 1
 
@@ -4125,6 +3763,7 @@ install_debian_onedir() {
 }
 
 install_debian_git_post() {
+
     for fname in api master minion syndic; do
         # Skip if not meant to be installed
         [ "$fname" = "api" ] && \
@@ -4141,7 +3780,7 @@ install_debian_git_post() {
         fi
 
         # Configure SystemD for Debian 8 "Jessie" and later
-        if [ -f /bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             if [ ! -f /lib/systemd/system/salt-${fname}.service ] || \
                 { [ -f /lib/systemd/system/salt-${fname}.service ] && [ $_FORCE_OVERWRITE -eq $BS_TRUE ]; }; then
                 if [ -f "${_SERVICE_DIR}/salt-${fname}.service" ]; then
@@ -4159,29 +3798,12 @@ install_debian_git_post() {
 
             /bin/systemctl enable "salt-${fname}.service"
             SYSTEMD_RELOAD=$BS_TRUE
-
-        # Install initscripts for Debian 7 "Wheezy"
-        elif [ ! -f "/etc/init.d/salt-$fname" ] || \
-            { [ -f "/etc/init.d/salt-$fname" ] && [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; }; then
-            __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/deb/salt-${fname}.init" "/etc/init.d/salt-${fname}"
-            __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/deb/salt-${fname}.environment" "/etc/default/salt-${fname}"
-
-            if [ ! -f "/etc/init.d/salt-${fname}" ]; then
-                echowarn "The init script for salt-${fname} was not found, skipping it..."
-                continue
-            fi
-
-            chmod +x "/etc/init.d/salt-${fname}"
-
-            # Skip salt-api since the service should be opt-in and not necessarily started on boot
-            [ "$fname" = "api" ] && continue
-
-            update-rc.d "salt-${fname}" defaults
         fi
     done
 }
 
 install_debian_2021_post() {
+
     # Kali 2021 (debian derivative) disables all network services by default
     # Using archlinux post function to enable salt systemd services
     install_arch_linux_post || return 1
@@ -4189,6 +3811,7 @@ install_debian_2021_post() {
 }
 
 install_debian_restart_daemons() {
+
     [ "$_START_DAEMONS" -eq $BS_FALSE ] && return 0
 
     for fname in api master minion syndic; do
@@ -4200,8 +3823,8 @@ install_debian_restart_daemons() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ]; then
-            # Debian 8 uses systemd
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
+            # Debian 8 and above use systemd
             /bin/systemctl stop salt-$fname > /dev/null 2>&1
             /bin/systemctl start salt-$fname.service && continue
             if [ "$_ECHO_DEBUG" -eq $BS_TRUE ]; then
@@ -4217,6 +3840,7 @@ install_debian_restart_daemons() {
 }
 
 install_debian_check_services() {
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -4226,7 +3850,7 @@ install_debian_check_services() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             __check_services_systemd salt-$fname || return 1
         elif [ -f /etc/init.d/salt-$fname ]; then
             __check_services_debian salt-$fname || return 1
@@ -4245,58 +3869,76 @@ install_debian_check_services() {
 #
 
 __install_saltstack_fedora_onedir_repository() {
-    if [ "$ITYPE" = "stable" ]; then
-        REPO_REV="$ONEDIR_REV"
-    else
-        REPO_REV="latest"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    __PY_VERSION_REPO="yum"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
-    fi
-
-    GPG_KEY="SALT-PROJECT-GPG-PUBKEY-2023.pub"
-
-    REPO_FILE="/etc/yum.repos.d/salt.repo"
-
-    if [ ! -s "$REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
-        FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/fedora/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}"
-        if [ "${ONEDIR_REV}" = "nightly" ] ; then
-            FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/fedora/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/"
+    if [ ! -s "$YUM_REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
+        FETCH_URL="https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
+        __fetch_url "${YUM_REPO_FILE}" "${FETCH_URL}"
+        if [ "$ONEDIR_REV" != "latest" ]; then
+            # 3006.x is the default and tracks the latest release of the 3006.x branch
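+            # The fetched salt.repo defines one entry per release train
+            # (e.g. salt-repo-latest, salt-repo-3007-sts); dnf config-manager
+            # toggles which entry is enabled.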
+            if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                # latest version for branch 3006 | 3007
+                REPO_REV_MAJOR=$(echo "$ONEDIR_REV" | cut -d '.' -f 1)
+                if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                    # Enable the Salt 3007 STS repo
+                    dnf config-manager --set-disabled 'salt-repo-*'
+                    dnf config-manager --set-enabled salt-repo-3007-sts
+                fi
+            elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                # using minor version
+                ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+                echo "[salt-repo-${ONEDIR_REV_DOT}-lts]" > "${YUM_REPO_FILE}"
+                # shellcheck disable=SC2129
+                echo "name=Salt Repo for Salt v${ONEDIR_REV_DOT} LTS" >> "${YUM_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                echo "priority=10" >> "${YUM_REPO_FILE}"
+                echo "enabled=1" >> "${YUM_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+            fi
+        else
+            # Enable the Salt LATEST repo
+            dnf config-manager --set-disabled 'salt-repo-*'
+            dnf config-manager --set-enabled salt-repo-latest
         fi
+        dnf clean expire-cache || return 1
+        dnf makecache || return 1
 
-        __fetch_url "${REPO_FILE}" "${FETCH_URL}.repo"
-
-        __rpm_import_gpg "${FETCH_URL}/${GPG_KEY}" || return 1
-
-        yum clean metadata || return 1
-    elif [ "$REPO_REV" != "latest" ]; then
+    elif [ "$ONEDIR_REV" != "latest" ]; then
         echowarn "salt.repo already exists, ignoring salt version argument."
-        echowarn "Use -F (forced overwrite) to install $REPO_REV."
+        echowarn "Use -F (forced overwrite) to install $ONEDIR_REV."
     fi
 
     return 0
 }
 
 install_fedora_deps() {
+
     if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
         dnf -y update || return 1
     fi
 
     __PACKAGES="${__PACKAGES:=}"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -lt 3 ]; then
-        echoerror "There are no Python 2 stable packages for Fedora, only Py3 packages"
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
         return 1
     fi
 
     # Salt on Fedora is Py3
     PY_PKG_VER=3
 
+    ## dnf-utils is not in the Fedora packaging archives; yum-utils only exists
+    ## for EL7 and Fedora 30 and earlier, but is available on CentOS Stream 8 and 9
     __PACKAGES="${__PACKAGES} dnf-utils libyaml procps-ng python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2"
     __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests python${PY_PKG_VER}-zmq"
     __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-pip python${PY_PKG_VER}-m2crypto python${PY_PKG_VER}-pyyaml"
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd"
+    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd sudo"
     if [ "${_EXTRA_PACKAGES}" != "" ]; then
         echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
     fi
@@ -4307,84 +3949,11 @@ install_fedora_deps() {
     return 0
 }
 
-install_fedora_stable() {
-    if [ "$STABLE_REV" = "latest" ]; then
-        __SALT_VERSION=""
-    else
-        __SALT_VERSION="$(dnf list --showduplicates salt | grep "$STABLE_REV" | head -n 1 | awk '{print $2}')"
-        if [ "x${__SALT_VERSION}" = "x" ]; then
-            echoerror "Could not find a stable install for Salt ${STABLE_REV}"
-            exit 1
-        fi
-        echoinfo "Installing Stable Package Version ${__SALT_VERSION}"
-        __SALT_VERSION="-${__SALT_VERSION}"
-    fi
-    __PACKAGES=""
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-cloud${__SALT_VERSION}"
-    fi
-    if [ "$_INSTALL_MASTER" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-master${__SALT_VERSION}"
-    fi
-    if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-minion${__SALT_VERSION}"
-    fi
-    if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-syndic${__SALT_VERSION}"
-    fi
-
-    # shellcheck disable=SC2086
-    __dnf_install_noinput ${__PACKAGES} || return 1
-
-    __python="python3"
-    if ! __check_command_exists python3; then
-        echoerror "Could not find a python3 binary?!"
-        return 1
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        __check_pip_allowed "You need to allow pip based installations (-P) for Tornado <5.0 in order to install Salt"
-        __installed_tornado_rpm=$(rpm -qa | grep python${PY_PKG_VER}-tornado)
-        if [ -n "${__installed_tornado_rpm}" ]; then
-            echodebug "Removing system package ${__installed_tornado_rpm}"
-            rpm -e --nodeps "${__installed_tornado_rpm}" || return 1
-        fi
-        __get_site_packages_dir_code=$(cat << EOM
-import site
-print([d for d in site.getsitepackages() if d.startswith('/usr/lib/python')][0])
-EOM
-)
-        __target_path=$(${__python} -c "${__get_site_packages_dir_code}")
-        echodebug "Running '${__python}' -m pip install --target ${__target_path} 'tornado<5.0'"
-        "${__python}" -m pip install --target "${__target_path}" "tornado<5" || return 1
-    fi
-
-    return 0
-}
-
-install_fedora_stable_post() {
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
-        sleep 1
-        systemctl daemon-reload
-    done
-}
-
 install_fedora_git_deps() {
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        # Packages are named python3-<whatever>
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=2
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
     __PACKAGES=""
@@ -4401,81 +3970,44 @@ install_fedora_git_deps() {
         __PACKAGES=""
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
+    __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc gcc-c++ sudo"
 
-        if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
-            __PACKAGES="${__PACKAGES} ca-certificates"
-        fi
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud python${PY_PKG_VER}-netaddr"
-        fi
-
-        install_fedora_deps || return 1
-
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            if __check_command_exists python3; then
-                __python="python3"
-            fi
-        elif [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-            if __check_command_exists python2; then
-                __python="python2"
-            fi
-        else
-            if ! __check_command_exists python; then
-                echoerror "Unable to find a python binary?!"
-                return 1
-            fi
-            # Let's hope it's the right one
-            __python="python"
-        fi
-
-        grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" | while IFS='
-    '         read -r dep; do
-                echodebug "Running '${__python}' -m pip install '${dep}'"
-                "${__python}" -m pip install "${dep}" || return 1
-            done
-    else
-        __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
-        if [ "${DISTRO_VERSION}" -ge 35 ]; then
-            __PACKAGES="${__PACKAGES} gcc-c++"
-        fi
-        # shellcheck disable=SC2086
-        __dnf_install_noinput ${__PACKAGES} || return 1
-    fi
+    # shellcheck disable=SC2086
+    __dnf_install_noinput ${__PACKAGES} || return 1
 
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
+    _fedora_dep="contextvars"
+    echodebug "Running '${_PY_EXE} -m pip install --upgrade ${_fedora_dep}'"
+    ${_PY_EXE} -m pip install --upgrade "${_fedora_dep}"
+
     return 0
 }
 
 install_fedora_git() {
+
     if [ "${_PY_EXE}" != "" ]; then
         _PYEXE=${_PY_EXE}
         echoinfo "Using the following python version: ${_PY_EXE} to install salt"
     else
-        _PYEXE='python2'
+        echoerror "Python 2 is no longer supported, only Py3 packages"
+        return 1
     fi
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        ${_PYEXE} setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
-    else
-        ${_PYEXE} setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
-    fi
+    __install_salt_from_repo "${_PY_EXE}" || return 1
     return 0
 }
 
 install_fedora_git_post() {
+
     for fname in api master minion syndic; do
         # Skip if not meant to be installed
         [ $fname = "api" ] && \
@@ -4503,10 +4035,12 @@ install_fedora_git_post() {
         systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
         sleep 1
         systemctl daemon-reload
+
     done
 }
 
 install_fedora_restart_daemons() {
+
     [ $_START_DAEMONS -eq $BS_FALSE ] && return
 
     for fname in api master minion syndic; do
@@ -4529,6 +4063,7 @@ install_fedora_restart_daemons() {
 }
 
 install_fedora_check_services() {
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -4561,19 +4096,13 @@ install_fedora_onedir_deps() {
     fi
 
     # If -R was passed, we need to configure custom repo url with rsync-ed packages
-    # Which is still handled in __install_saltstack_rhel_repository. This call has
+    # which is still handled in __install_saltstack_rhel_onedir_repository. This call has
     # its own check in case -r was passed without -R.
     if [ "$_CUSTOM_REPO_URL" != "null" ]; then
         __install_saltstack_fedora_onedir_repository || return 1
     fi
 
-    if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-        __PACKAGES="dnf-utils chkconfig"
-    else
-        __PACKAGES="yum-utils chkconfig"
-    fi
-
-    __PACKAGES="${__PACKAGES} procps"
+    __PACKAGES="dnf-utils chkconfig procps-ng sudo"
 
     # shellcheck disable=SC2086
     __yum_install_noinput ${__PACKAGES} || return 1
@@ -4590,36 +4119,67 @@ install_fedora_onedir_deps() {
 
 
 install_fedora_onedir() {
+
     STABLE_REV=$ONEDIR_REV
     #install_fedora_stable || return 1
+    if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        # Major version of Salt; config and repo are already set up
+        MINOR_VER_STRG=""
+    elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+        # Minor version of Salt; pin the specific minor version
+        STABLE_REV_DOT=$(echo "$STABLE_REV" | sed 's/-/\./')
+        MINOR_VER_STRG="-$STABLE_REV_DOT"
+    else
+        MINOR_VER_STRG=""
+    fi
 
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-cloud"
+        __PACKAGES="${__PACKAGES} salt-cloud$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-master"
+        __PACKAGES="${__PACKAGES} salt-master$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-minion"
+        __PACKAGES="${__PACKAGES} salt-minion$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-syndic"
+        __PACKAGES="${__PACKAGES} salt-syndic$MINOR_VER_STRG"
+    fi
+
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api$MINOR_VER_STRG"
     fi
 
+    dnf makecache || return 1
     # shellcheck disable=SC2086
     __yum_install_noinput ${__PACKAGES} || return 1
 
     return 0
 }
 
 install_fedora_onedir_post() {
+
     STABLE_REV=$ONEDIR_REV
-    install_fedora_stable_post || return 1
+
+    for fname in api master minion syndic; do
+        # Skip salt-api since the service should be opt-in and not necessarily started on boot
+        [ $fname = "api" ] && continue
+
+        # Skip if not meant to be installed
+        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
+        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
+        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
+
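+        # If the unit is not enabled yet, apply the vendor preset first and
+        # then enable it explicitly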
+        systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
+        sleep 1
+        systemctl daemon-reload
+    done
 
     return 0
 }
+
 #
 #   Ended Fedora Install Functions
 #
@@ -4629,133 +4189,57 @@ install_fedora_onedir_post() {
 #
 #   CentOS Install Functions
 #
-__install_saltstack_rhel_repository() {
-  if [ "${DISTRO_MAJOR_VERSION}" -ge 9 ]; then
-    echoerror "Old stable repository unavailable on RH variants greater than or equal to 9"
-    echoerror "Use the stable install type."
-    exit 1
-  fi
-
-    if [ "$ITYPE" = "stable" ]; then
-        repo_rev="$STABLE_REV"
-    else
-        repo_rev="latest"
-    fi
-
-    __PY_VERSION_REPO="yum"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
-    fi
-
-    # Avoid using '$releasever' variable for yum.
-    # Instead, this should work correctly on all RHEL variants.
-    base_url="${HTTP_VAL}://${_REPO_URL}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/${repo_rev}/"
-    if [ "${DISTRO_MAJOR_VERSION}" -eq 7 ]; then
-        gpg_key="SALTSTACK-GPG-KEY.pub base/RPM-GPG-KEY-CentOS-7"
-    elif [ "${DISTRO_MAJOR_VERSION}" -ge 9 ]; then
-        gpg_key="SALTSTACK-GPG-KEY2.pub"
-    else
-        gpg_key="SALTSTACK-GPG-KEY.pub"
-    fi
-
-    gpg_key_urls=""
-    for key in $gpg_key; do
-        gpg_key_urls=$(printf "${base_url}${key},%s" "$gpg_key_urls")
-    done
-
-    repo_file="/etc/yum.repos.d/salt.repo"
-
-    if [ ! -s "$repo_file" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
-        cat <<_eof > "$repo_file"
-[saltstack]
-name=SaltStack ${repo_rev} Release Channel for RHEL/CentOS \$releasever
-baseurl=${base_url}
-skip_if_unavailable=True
-gpgcheck=1
-gpgkey=${gpg_key_urls}
-enabled=1
-enabled_metadata=1
-_eof
-
-        fetch_url="${HTTP_VAL}://${_REPO_URL}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${repo_rev}/"
-        for key in $gpg_key; do
-            __rpm_import_gpg "${fetch_url}${key}" || return 1
-        done
-
-        yum clean metadata || return 1
-    elif [ "$repo_rev" != "latest" ]; then
-        echowarn "salt.repo already exists, ignoring salt version argument."
-        echowarn "Use -F (forced overwrite) to install $repo_rev."
-    fi
-
-    return 0
-}
-
 __install_saltstack_rhel_onedir_repository() {
-    if [ "$ITYPE" = "stable" ]; then
-        repo_rev="$ONEDIR_REV"
-    else
-        repo_rev="latest"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    __PY_VERSION_REPO="yum"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
-    fi
-
-    # Avoid using '$releasever' variable for yum.
-    # Instead, this should work correctly on all RHEL variants.
-    base_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/${ONEDIR_REV}/"
-    if [ "${ONEDIR_REV}" = "nightly" ] ; then
-        base_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/"
-    fi
-    if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ] || [ "${ONEDIR_REV}" = "nightly" ]; then
-      if [ "${DISTRO_MAJOR_VERSION}" -eq 9 ]; then
-          gpg_key="SALTSTACK-GPG-KEY2.pub"
-      else
-          gpg_key="SALTSTACK-GPG-KEY.pub"
-      fi
-    else
-        gpg_key="SALT-PROJECT-GPG-PUBKEY-2023.pub"
-    fi
-
-    gpg_key_urls=""
-    for key in $gpg_key; do
-        gpg_key_urls=$(printf "${base_url}${key},%s" "$gpg_key_urls")
-    done
-
-    repo_file="/etc/yum.repos.d/salt.repo"
-
-    if [ ! -s "$repo_file" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
-        cat <<_eof > "$repo_file"
-[saltstack]
-name=SaltStack ${repo_rev} Release Channel for RHEL/CentOS \$releasever
-baseurl=${base_url}
-skip_if_unavailable=True
-gpgcheck=1
-gpgkey=${gpg_key_urls}
-enabled=1
-enabled_metadata=1
-_eof
-
-        fetch_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}/"
-        if [ "${ONEDIR_REV}" = "nightly" ] ; then
-            fetch_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/"
+    if [ ! -s "$YUM_REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
+        FETCH_URL="https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
+        __fetch_url "${YUM_REPO_FILE}" "${FETCH_URL}"
+        if [ "$ONEDIR_REV" != "latest" ]; then
+            # 3006.x is the default and tracks the latest release of the 3006.x branch
+            if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                # latest version for branch 3006 | 3007
+                REPO_REV_MAJOR=$(echo "$ONEDIR_REV" | cut -d '.' -f 1)
+                if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                    # Enable the Salt 3007 STS repo
+                    yum config-manager --set-disabled 'salt-repo-*'
+                    yum config-manager --set-enabled salt-repo-3007-sts
+                fi
+            elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                # using minor version
+                ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+                echo "[salt-repo-${ONEDIR_REV_DOT}-lts]" > "${YUM_REPO_FILE}"
+                # shellcheck disable=SC2129
+                echo "name=Salt Repo for Salt v${ONEDIR_REV_DOT} LTS" >> "${YUM_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                echo "priority=10" >> "${YUM_REPO_FILE}"
+                echo "enabled=1" >> "${YUM_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+            fi
+        else
+            # Enable the Salt LATEST repo
+            yum config-manager --set-disabled 'salt-repo-*'
+            yum config-manager --set-enabled salt-repo-latest
         fi
-        for key in $gpg_key; do
-            __rpm_import_gpg "${fetch_url}${key}" || return 1
-        done
-
-        yum clean metadata || return 1
-    elif [ "$repo_rev" != "latest" ]; then
+        yum clean expire-cache || return 1
+        yum makecache || return 1
+    elif [ "$ONEDIR_REV" != "latest" ]; then
         echowarn "salt.repo already exists, ignoring salt version argument."
-        echowarn "Use -F (forced overwrite) to install $repo_rev."
+        echowarn "Use -F (forced overwrite) to install $ONEDIR_REV."
     fi
 
     return 0
 }
 
 install_centos_stable_deps() {
+
     if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
         yum -y update || return 1
     fi
@@ -4767,48 +4251,18 @@ install_centos_stable_deps() {
     fi
 
     if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then
-        __install_saltstack_rhel_repository || return 1
+        echoerror "old-stable packages are no longer supported and are End-Of-Life."
+        return 1
     fi
 
     # If -R was passed, we need to configure custom repo url with rsync-ed packages
-    # Which is still handled in __install_saltstack_rhel_repository. This call has
+    # which is still handled in __install_saltstack_rhel_onedir_repository. This call has
     # its own check in case -r was passed without -R.
     if [ "$_CUSTOM_REPO_URL" != "null" ]; then
-        __install_saltstack_rhel_repository || return 1
+        __install_saltstack_rhel_onedir_repository || return 1
     fi
 
-    if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-        __PACKAGES="dnf-utils chkconfig"
-    else
-        __PACKAGES="yum-utils chkconfig"
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-            # YAML module is used for generating custom master/minion configs
-            if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-                __PACKAGES="${__PACKAGES} python3-pyyaml python3-setuptools"
-            else
-                __PACKAGES="${__PACKAGES} python2-pyyaml"
-            fi
-        elif [ "$DISTRO_MAJOR_VERSION" -eq 7 ]; then
-            # YAML module is used for generating custom master/minion configs
-            if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-                __PACKAGES="${__PACKAGES} python36-PyYAML python36-setuptools"
-            else
-                __PACKAGES="${__PACKAGES} PyYAML"
-            fi
-        else
-            # YAML module is used for generating custom master/minion configs
-            if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-                __PACKAGES="${__PACKAGES} python34-PyYAML python34-setuptools"
-            else
-                __PACKAGES="${__PACKAGES} PyYAML"
-            fi
-        fi
-    fi
-
-    __PACKAGES="${__PACKAGES} procps"
+    __PACKAGES="yum-utils chkconfig procps-ng findutils sudo"
 
     # shellcheck disable=SC2086
     __yum_install_noinput ${__PACKAGES} || return 1
@@ -4819,27 +4273,43 @@ install_centos_stable_deps() {
         __yum_install_noinput ${_EXTRA_PACKAGES} || return 1
     fi
 
-
     return 0
 }
 
 install_centos_stable() {
+
+    if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        # Major version of Salt; config and repo are already set up
+        MINOR_VER_STRG=""
+    elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+        # Minor version of Salt; pin the specific minor version
+        STABLE_REV_DOT=$(echo "$STABLE_REV" | sed 's/-/\./')
+        MINOR_VER_STRG="-$STABLE_REV_DOT"
+    else
+        MINOR_VER_STRG=""
+    fi
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-cloud"
+        __PACKAGES="${__PACKAGES} salt-cloud$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-master"
+        __PACKAGES="${__PACKAGES} salt-master$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-minion"
+        __PACKAGES="${__PACKAGES} salt-minion$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-syndic"
+        __PACKAGES="${__PACKAGES} salt-syndic$MINOR_VER_STRG"
+    fi
+
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api$MINOR_VER_STRG"
     fi
 
+    yum makecache || return 1
     # shellcheck disable=SC2086
     __yum_install_noinput ${__PACKAGES} || return 1
 
     # Workaround for 3.11 broken on CentOS Stream 8.x
@@ -4853,6 +4323,7 @@ install_centos_stable() {
 }
 
 install_centos_stable_post() {
+
     SYSTEMD_RELOAD=$BS_FALSE
 
     for fname in api master minion syndic; do
@@ -4864,7 +4335,7 @@ install_centos_stable_post() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             /bin/systemctl is-enabled salt-${fname}.service > /dev/null 2>&1 || (
                 /bin/systemctl preset salt-${fname}.service > /dev/null 2>&1 &&
                 /bin/systemctl enable salt-${fname}.service > /dev/null 2>&1
@@ -4884,14 +4355,14 @@ install_centos_stable_post() {
 }
 
 install_centos_git_deps() {
+
     # First try stable deps then fall back to onedir deps if that one fails
     # if we're installing on a Red Hat based host that doesn't have the classic
     # package repos available.
     # Set ONEDIR_REV to STABLE_REV in case we
     # end up calling install_centos_onedir_deps
     ONEDIR_REV=${STABLE_REV}
-    install_centos_onedir_deps || \
-    return 1
+    install_centos_onedir_deps || return 1
 
     if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
         __yum_install_noinput ca-certificates || return 1
@@ -4901,116 +4372,29 @@ install_centos_git_deps() {
         __yum_install_noinput git || return 1
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
     __PACKAGES=""
 
     if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-            # Packages are named python3-<whatever>
-            PY_PKG_VER=3
-            __PACKAGES="${__PACKAGES} python3"
-        else
-            # Packages are named python36-<whatever>
-            PY_PKG_VER=36
-            __PACKAGES="${__PACKAGES} python36"
-        fi
+        # Packages are named python3-<whatever>
+        PY_PKG_VER=3
+        __PACKAGES="${__PACKAGES} python3"
     else
-        PY_PKG_VER=""
-        if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-            __PACKAGES="${__PACKAGES} python2"
-        elif [ "$DISTRO_MAJOR_VERSION" -eq 6 ]; then
-            PY_PKG_VER=27
-            __PACKAGES="${__PACKAGES} python27"
-        else
-            __PACKAGES="${__PACKAGES} python"
-        fi
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        _install_m2crypto_req=false
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            _py=${_PY_EXE}
-            if [ "$DISTRO_MAJOR_VERSION" -gt 6 ]; then
-                _install_m2crypto_req=true
-            fi
-        else
-            if [ "$DISTRO_MAJOR_VERSION" -eq 6 ]; then
-                _install_m2crypto_req=true
-            fi
-            _py="python"
+    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc sudo"
 
-            # Only Py2 needs python-futures
-            __PACKAGES="${__PACKAGES} python-futures"
+    # shellcheck disable=SC2086
+    __yum_install_noinput ${__PACKAGES} || return 1
 
-            # There is no systemd-python3 package as of this writing
-            if [ "$DISTRO_MAJOR_VERSION" -ge 7 ]; then
-                __PACKAGES="${__PACKAGES} systemd-python"
-            fi
-        fi
-
-        if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-            __install_tornado_pip ${_py} || return 1
-            __PACKAGES="${__PACKAGES} python3-m2crypto"
-        else
-            __PACKAGES="${__PACKAGES} m2crypto python${PY_PKG_VER}-crypto"
-        fi
-
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-jinja2"
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests"
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado python${PY_PKG_VER}-zmq"
-
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud"
-        fi
-
-        if [ "${_INSTALL_PY}" -eq "${BS_TRUE}" ]; then
-            # Install Python if "-y" was passed in.
-            __install_python || return 1
-        fi
-
-        if [ "${_PY_EXE}" != "" ] && [ "$_PIP_ALLOWED" -eq "$BS_TRUE" ]; then
-            # If "-x" is defined, install dependencies with pip based on the Python version given.
-            _PIP_PACKAGES="m2crypto!=0.33.0 jinja2 msgpack-python pycrypto PyYAML tornado<5.0 zmq futures>=2.0"
-
-            # install swig and openssl on cent6
-            if $_install_m2crypto_req; then
-                __yum_install_noinput openssl-devel swig || return 1
-            fi
-
-            if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-                # Filter out any commented lines from the requirements file
-                _REQ_LINES="$(grep '^[^#]' "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-                for SINGLE_PACKAGE in ${_PIP_PACKAGES}; do
-                    __REQUIRED_VERSION="$(grep "${SINGLE_PACKAGE}" "${_REQ_LINES}")"
-                    if [ "${__REQUIRED_VERSION}" != "" ]; then
-                        _PIP_PACKAGES=$(echo "$_PIP_PACKAGES" | sed "s/${SINGLE_PACKAGE}/${__REQUIRED_VERSION}/")
-                    fi
-                done
-            fi
-
-            if [ "$_INSTALL_CLOUD" -eq "${BS_TRUE}" ]; then
-                _PIP_PACKAGES="${_PIP_PACKAGES} apache-libcloud"
-            fi
-
-            __install_pip_pkgs "${_PIP_PACKAGES}" "${_PY_EXE}" || return 1
-        else
-            # shellcheck disable=SC2086
-            __yum_install_noinput ${__PACKAGES} || return 1
-        fi
-    else
-        if [ "${_INSTALL_PY}" -eq "${BS_TRUE}" ] && [ "$DISTRO_MAJOR_VERSION" -lt 8 ]; then
-            # Install Python if "-y" was passed in.
-            __install_python || return 1
-        fi
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
-        # shellcheck disable=SC2086
-        __yum_install_noinput ${__PACKAGES} || return 1
-    fi
 
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -5018,29 +4402,23 @@ install_centos_git_deps() {
 }
 
 install_centos_git() {
+
     if [ "${_PY_EXE}" != "" ]; then
         _PYEXE=${_PY_EXE}
         echoinfo "Using the following python version: ${_PY_EXE} to install salt"
     else
-        _PYEXE='python2'
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
     echodebug "_PY_EXE: $_PY_EXE"
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        $_PYEXE setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
-    else
-        $_PYEXE setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
-    fi
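+    # Assumption: __install_salt_from_repo pip-installs Salt from the git checkout using ${_PY_EXE}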
+    __install_salt_from_repo "${_PY_EXE}" || return 1
 
     return 0
 }
 
 install_centos_git_post() {
+
     SYSTEMD_RELOAD=$BS_FALSE
 
     for fname in api master minion syndic; do
@@ -5057,7 +4435,8 @@ install_centos_git_post() {
         else
           _SERVICE_FILE="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service"
         fi
-        if [ -f /bin/systemctl ]; then
+
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             if [ ! -f "/usr/lib/systemd/system/salt-${fname}.service" ] || \
                 { [ -f "/usr/lib/systemd/system/salt-${fname}.service" ] && [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; }; then
                 __copyfile "${_SERVICE_FILE}" /usr/lib/systemd/system
@@ -5081,7 +4460,8 @@ install_centos_git_post() {
 }
 
 install_centos_onedir_deps() {
-    if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
+
+    if [ "$_UPGRADE_SYS" -eq "$BS_TRUE" ]; then
         yum -y update || return 1
     fi
 
@@ -5096,19 +4476,13 @@ install_centos_onedir_deps() {
     fi
 
     # If -R was passed, we need to configure custom repo url with rsync-ed packages
-    # Which is still handled in __install_saltstack_rhel_repository. This call has
-    # its own check in case -r was passed without -R.
+    # This was previously handled in __install_saltstack_rhel_repository, which served
+    # old-stable and has been removed since old-stable reached End-Of-Life. This call
+    # has its own check in case -r was passed without -R.
     if [ "$_CUSTOM_REPO_URL" != "null" ]; then
         __install_saltstack_rhel_onedir_repository || return 1
     fi
 
-    if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then
-        __PACKAGES="dnf-utils chkconfig"
-    else
-        __PACKAGES="yum-utils chkconfig"
-    fi
-
-    __PACKAGES="${__PACKAGES} procps"
+    __PACKAGES="yum-utils chkconfig procps-ng findutils sudo"
 
     # shellcheck disable=SC2086
     __yum_install_noinput ${__PACKAGES} || return 1
@@ -5119,33 +4493,51 @@ install_centos_onedir_deps() {
         __yum_install_noinput ${_EXTRA_PACKAGES} || return 1
     fi
 
-
     return 0
 }
 
 install_centos_onedir() {
+
+    if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        # Major version Salt, config and repo already set up
+        MINOR_VER_STRG=""
+    elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+        # Minor version Salt, need to add specific minor version
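+        # e.g. an ONEDIR_REV of "3006.8" (or "3006-8") yields MINOR_VER_STRG "-3006.8"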
+        ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+        MINOR_VER_STRG="-$ONEDIR_REV_DOT"
+    else
+        MINOR_VER_STRG=""
+    fi
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-cloud"
+        __PACKAGES="${__PACKAGES} salt-cloud$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-master"
+        __PACKAGES="${__PACKAGES} salt-master$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-minion"
+        __PACKAGES="${__PACKAGES} salt-minion$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-syndic"
+        __PACKAGES="${__PACKAGES} salt-syndic$MINOR_VER_STRG"
+    fi
+
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api$MINOR_VER_STRG"
     fi
 
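+    # Refresh repo metadata so the newly configured Salt repo is visible; listing
+    # salt-minion doubles as a sanity check that the repository is reachable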
+    yum makecache || return 1
+    yum list salt-minion || return 1
     # shellcheck disable=SC2086
     __yum_install_noinput ${__PACKAGES} || return 1
 
     return 0
 }
 
 install_centos_onedir_post() {
+
     SYSTEMD_RELOAD=$BS_FALSE
 
     for fname in api master minion syndic; do
@@ -5157,7 +4549,7 @@ install_centos_onedir_post() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             /bin/systemctl is-enabled salt-${fname}.service > /dev/null 2>&1 || (
                 /bin/systemctl preset salt-${fname}.service > /dev/null 2>&1 &&
                 /bin/systemctl enable salt-${fname}.service > /dev/null 2>&1
@@ -5177,7 +4569,8 @@ install_centos_onedir_post() {
 }
 
 install_centos_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
 
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
@@ -5188,26 +4581,11 @@ install_centos_restart_daemons() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /sbin/initctl ] && [ -f /etc/init/salt-${fname}.conf ]; then
-            # We have upstart support and upstart knows about our service
-            if ! /sbin/initctl status salt-$fname > /dev/null 2>&1; then
-                # Everything is in place and upstart gave us an error code? Fail!
-                return 1
-            fi
-
-            # upstart knows about this service.
-            # Let's try to stop it, and then start it
-            /sbin/initctl stop salt-$fname > /dev/null 2>&1
-            # Restart service
-            if ! /sbin/initctl start salt-$fname > /dev/null 2>&1; then
-                # Failed the restart?!
-                return 1
-            fi
-        elif [ -f /etc/init.d/salt-$fname ]; then
+        if [ -f /etc/init.d/salt-$fname ]; then
             # Disable stdin to fix shell session hang on killing tee pipe
             service salt-$fname stop < /dev/null > /dev/null 2>&1
             service salt-$fname start < /dev/null
-        elif [ -f /usr/bin/systemctl ]; then
+        elif [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             # CentOS 7 uses systemd
             /usr/bin/systemctl stop salt-$fname > /dev/null 2>&1
             /usr/bin/systemctl start salt-$fname.service && continue
@@ -5221,21 +4599,25 @@ install_centos_restart_daemons() {
 }
 
 install_centos_testing_deps() {
+
     install_centos_stable_deps || return 1
     return 0
 }
 
 install_centos_testing() {
+
     install_centos_stable || return 1
     return 0
 }
 
 install_centos_testing_post() {
+
     install_centos_stable_post || return 1
     return 0
 }
 
 install_centos_check_services() {
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -5245,12 +4627,10 @@ install_centos_check_services() {
         [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /sbin/initctl ] && [ -f /etc/init/salt-${fname}.conf ]; then
-            __check_services_upstart salt-$fname || return 1
-        elif [ -f /etc/init.d/salt-$fname ]; then
-            __check_services_sysvinit salt-$fname || return 1
-        elif [ -f /usr/bin/systemctl ]; then
-            __check_services_systemd salt-$fname || return 1
+        if [ -f "/etc/init.d/salt-$fname" ]; then
+            __check_services_sysvinit "salt-$fname" || return 1
+        elif [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
+            __check_services_systemd "salt-$fname" || return 1
         fi
     done
 
@@ -5266,286 +4646,343 @@ install_centos_check_services() {
 #   RedHat Install Functions
 #
 install_red_hat_linux_stable_deps() {
+
     install_centos_stable_deps || return 1
     return 0
 }
 
 install_red_hat_linux_git_deps() {
+
     install_centos_git_deps || return 1
     return 0
 }
 
 install_red_hat_linux_onedir_deps() {
+
     install_centos_onedir_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_stable_deps() {
+
     install_red_hat_linux_stable_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_git_deps() {
+
     install_red_hat_linux_git_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_onedir_deps() {
+
     install_red_hat_linux_onedir_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_stable_deps() {
+
     install_red_hat_linux_stable_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_git_deps() {
+
     install_red_hat_linux_git_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_onedir_deps() {
+
     install_red_hat_linux_onedir_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_stable_deps() {
+
     install_red_hat_linux_stable_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_git_deps() {
+
     install_red_hat_linux_git_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_onedir_deps() {
+
     install_red_hat_linux_onedir_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_stable_deps() {
+
     install_red_hat_linux_stable_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_git_deps() {
+
     install_red_hat_linux_git_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_onedir_deps() {
+
     install_red_hat_linux_onedir_deps || return 1
     return 0
 }
 
 install_red_hat_linux_stable() {
+
     install_centos_stable || return 1
     return 0
 }
 
 install_red_hat_linux_git() {
+
     install_centos_git || return 1
     return 0
 }
 
 install_red_hat_linux_onedir() {
+
     install_centos_onedir || return 1
     return 0
 }
 
 install_red_hat_enterprise_stable() {
+
     install_red_hat_linux_stable || return 1
     return 0
 }
 
 install_red_hat_enterprise_git() {
+
     install_red_hat_linux_git || return 1
     return 0
 }
 
 install_red_hat_enterprise_onedir() {
+
     install_red_hat_linux_onedir || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_stable() {
+
     install_red_hat_linux_stable || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_git() {
+
     install_red_hat_linux_git || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_onedir() {
+
     install_red_hat_linux_onedir || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_stable() {
+
     install_red_hat_linux_stable || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_git() {
+
     install_red_hat_linux_git || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_onedir() {
+
     install_red_hat_linux_onedir || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_stable() {
+
     install_red_hat_linux_stable || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_git() {
+
     install_red_hat_linux_git || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_onedir() {
+
     install_red_hat_linux_onedir || return 1
     return 0
 }
 
 install_red_hat_linux_stable_post() {
+
     install_centos_stable_post || return 1
     return 0
 }
 
 install_red_hat_linux_restart_daemons() {
+
     install_centos_restart_daemons || return 1
     return 0
 }
 
 install_red_hat_linux_git_post() {
+
     install_centos_git_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_stable_post() {
+
     install_red_hat_linux_stable_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_restart_daemons() {
+
     install_red_hat_linux_restart_daemons || return 1
     return 0
 }
 
 install_red_hat_enterprise_git_post() {
+
     install_red_hat_linux_git_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_stable_post() {
+
     install_red_hat_linux_stable_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_restart_daemons() {
+
     install_red_hat_linux_restart_daemons || return 1
     return 0
 }
 
 install_red_hat_enterprise_linux_git_post() {
+
     install_red_hat_linux_git_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_stable_post() {
+
     install_red_hat_linux_stable_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_restart_daemons() {
+
     install_red_hat_linux_restart_daemons || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_git_post() {
+
     install_red_hat_linux_git_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_stable_post() {
+
     install_red_hat_linux_stable_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_restart_daemons() {
+
     install_red_hat_linux_restart_daemons || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_git_post() {
+
     install_red_hat_linux_git_post || return 1
     return 0
 }
 
 install_red_hat_linux_testing_deps() {
+
     install_centos_testing_deps || return 1
     return 0
 }
 
 install_red_hat_linux_testing() {
+
     install_centos_testing || return 1
     return 0
 }
 
 install_red_hat_linux_testing_post() {
+
     install_centos_testing_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_testing_deps() {
+
     install_centos_testing_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_testing() {
+
     install_centos_testing || return 1
     return 0
 }
 
 install_red_hat_enterprise_testing_post() {
+
     install_centos_testing_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_testing_deps() {
+
     install_centos_testing_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_testing() {
+
     install_centos_testing || return 1
     return 0
 }
 
 install_red_hat_enterprise_server_testing_post() {
+
     install_centos_testing_post || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_testing_deps() {
+
     install_centos_testing_deps || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_testing() {
+
     install_centos_testing || return 1
     return 0
 }
 
 install_red_hat_enterprise_workstation_testing_post() {
+
     install_centos_testing_post || return 1
     return 0
 }
@@ -5559,11 +4996,13 @@ install_red_hat_enterprise_workstation_testing_post() {
 #   Oracle Linux Install Functions
 #
 install_oracle_linux_stable_deps() {
+
     # Install Oracle's EPEL.
-    if [ ${_EPEL_REPOS_INSTALLED} -eq $BS_FALSE ]; then
+    if [ "${_EPEL_REPOS_INSTALLED}" -eq $BS_FALSE ]; then
         _EPEL_REPO=oracle-epel-release-el${DISTRO_MAJOR_VERSION}
         if ! rpm -q "${_EPEL_REPO}" > /dev/null; then
-            __yum_install_noinput "${_EPEL_REPO}"
+            # shellcheck disable=SC2086
+            __yum_install_noinput ${_EPEL_REPO}
         fi
         _EPEL_REPOS_INSTALLED=$BS_TRUE
     fi
@@ -5643,7 +5082,7 @@ install_oracle_linux_check_services() {
 
 #######################################################################################################################
 #
-#   ALmaLinux Install Functions
+#   AlmaLinux Install Functions
 #
 install_almalinux_stable_deps() {
     install_centos_stable_deps || return 1
@@ -5948,6 +5387,7 @@ install_cloud_linux_check_services() {
 #   Alpine Linux Install Functions
 #
 install_alpine_linux_stable_deps() {
+    _PIP_INSTALL_ARGS=""
     if ! grep -q '^[^#].\+alpine/.\+/community' /etc/apk/repositories; then
         # Add community repository entry based on the "main" repo URL
         __REPO=$(grep '^[^#].\+alpine/.\+/main\>' /etc/apk/repositories)
@@ -5966,41 +5406,24 @@ install_alpine_linux_stable_deps() {
 }
 
 install_alpine_linux_git_deps() {
+    _PIP_INSTALL_ARGS=""
     install_alpine_linux_stable_deps || return 1
 
     if ! __check_command_exists git; then
         apk -U add git  || return 1
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        apk -U add python2 py-virtualenv py2-crypto py2-m2crypto py2-setuptools \
-            py2-jinja2 py2-yaml py2-markupsafe py2-msgpack py2-psutil \
-            py2-zmq zeromq py2-requests || return 1
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # We're on the master branch, install whichever tornado is on the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                apk -U add py2-tornado || return 1
-            fi
-        fi
-    else
-        apk -U add python3 python3-dev py3-pip py3-setuptools g++ linux-headers zeromq-dev openrc || return 1
-        _PY_EXE=python3
-        return 0
-    fi
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
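+    # g++, linux-headers and zeromq-dev are assumed needed so pip can build pyzmq and
+    # similar C extensions from source during the git install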
+    apk -U add python3 python3-dev py3-pip py3-setuptools g++ linux-headers zeromq-dev openrc || return 1
+    _PY_EXE=python3
+    return 0
 }
 
 install_alpine_linux_stable() {
     __PACKAGES="salt"
+    _PIP_INSTALL_ARGS=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
         __PACKAGES="${__PACKAGES} salt-cloud"
@@ -6015,26 +5438,23 @@ install_alpine_linux_stable() {
         __PACKAGES="${__PACKAGES} salt-syndic"
     fi
 
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api"
+    fi
+
     # shellcheck disable=SC2086
     apk -U add ${__PACKAGES} || return 1
     return 0
 }
 
 install_alpine_linux_git() {
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        python2 setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install || return 1
-    else
-        python2 setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1
-    fi
+    _PIP_INSTALL_ARGS=""
+    __install_salt_from_repo "${_PY_EXE}" || return 1
+    return 0
 }
 
 install_alpine_linux_post() {
+    _PIP_INSTALL_ARGS=""
     for fname in api master minion syndic; do
         # Skip if not meant to be installed
         [ $fname = "api" ] && \
@@ -6064,6 +5484,7 @@ install_alpine_linux_post() {
 }
 
 install_alpine_linux_restart_daemons() {
+    _PIP_INSTALL_ARGS=""
     [ "${_START_DAEMONS}" -eq $BS_FALSE ] && return
 
     for fname in api master minion syndic; do
@@ -6082,6 +5503,7 @@ install_alpine_linux_restart_daemons() {
 }
 
 install_alpine_linux_check_services() {
+    _PIP_INSTALL_ARGS=""
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -6098,6 +5520,7 @@ install_alpine_linux_check_services() {
 }
 
 daemons_running_alpine_linux() {
+    _PIP_INSTALL_ARGS=""
     [ "${_START_DAEMONS}" -eq $BS_FALSE ] && return
 
     FAILED_DAEMONS=0
@@ -6131,167 +5554,21 @@ daemons_running_alpine_linux() {
 #   Amazon Linux AMI Install Functions
 #
 
-install_amazon_linux_ami_deps() {
-    # Shim to figure out if we're using old (rhel) or new (aws) rpms.
-    _USEAWS=$BS_FALSE
-    pkg_append="python"
-
-    if [ "$ITYPE" = "stable" ]; then
-        repo_rev="$STABLE_REV"
-    else
-        repo_rev="latest"
-    fi
-
-    if echo $repo_rev | grep -E -q '^archive'; then
-        year=$(echo "$repo_rev" | cut -d '/' -f 2 | cut -c1-4)
-    else
-        year=$(echo "$repo_rev" | cut -c1-4)
-    fi
-
-    if echo "$repo_rev" | grep -E -q '^(latest|2016\.11)$' || \
-            [ "$year" -gt 2016 ]; then
-       _USEAWS=$BS_TRUE
-       pkg_append="python27"
-    fi
-
-    # We need to install yum-utils before doing anything else when installing on
-    # Amazon Linux ECS-optimized images. See issue #974.
-    __yum_install_noinput yum-utils
-
-    # Do upgrade early
-    if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
-        yum -y update || return 1
-    fi
-
-    if [ $_DISABLE_REPOS -eq $BS_FALSE ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
-        __REPO_FILENAME="salt.repo"
-
-        # Set a few vars to make life easier.
-        if [ $_USEAWS -eq $BS_TRUE ]; then
-           base_url="$HTTP_VAL://${_REPO_URL}/yum/amazon/latest/\$basearch/$repo_rev/"
-           gpg_key="${base_url}SALTSTACK-GPG-KEY.pub"
-           repo_name="SaltStack repo for Amazon Linux"
-        else
-           base_url="$HTTP_VAL://${_REPO_URL}/yum/redhat/6/\$basearch/$repo_rev/"
-           gpg_key="${base_url}SALTSTACK-GPG-KEY.pub"
-           repo_name="SaltStack repo for RHEL/CentOS 6"
-        fi
-
-        # This should prob be refactored to use __install_saltstack_rhel_repository()
-        # With args passed in to do the right thing.  Reformatted to be more like the
-        # amazon linux yum file.
-        if [ ! -s "/etc/yum.repos.d/${__REPO_FILENAME}" ]; then
-          cat <<_eof > "/etc/yum.repos.d/${__REPO_FILENAME}"
-[saltstack-repo]
-name=$repo_name
-failovermethod=priority
-priority=10
-gpgcheck=1
-gpgkey=$gpg_key
-baseurl=$base_url
-_eof
-        fi
-
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        # Package python-ordereddict-1.1-2.el6.noarch is obsoleted by python26-2.6.9-2.88.amzn1.x86_64
-        # which is already installed
-        __PACKAGES="m2crypto ${pkg_append}-crypto ${pkg_append}-jinja2 ${pkg_append}-PyYAML"
-        __PACKAGES="${__PACKAGES} ${pkg_append}-msgpack ${pkg_append}-requests ${pkg_append}-zmq"
-        __PACKAGES="${__PACKAGES} ${pkg_append}-futures"
-        # shellcheck disable=SC2086
-        __yum_install_noinput ${__PACKAGES} || return 1
-    fi
-
-    if [ "${_EXTRA_PACKAGES}" != "" ]; then
-        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
-        # shellcheck disable=SC2086
-        __yum_install_noinput ${_EXTRA_PACKAGES} || return 1
-    fi
-}
-
-install_amazon_linux_ami_git_deps() {
-    if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
-        yum -y install ca-certificates || return 1
-    fi
-
-    PIP_EXE='pip'
-    if __check_command_exists python2.7; then
-        if ! __check_command_exists pip2.7; then
-            if ! __check_command_exists easy_install-2.7; then
-                __yum_install_noinput python27-setuptools
-            fi
-            /usr/bin/easy_install-2.7 pip || return 1
-        fi
-        PIP_EXE='/usr/local/bin/pip2.7'
-        _PY_EXE='python2.7'
-    fi
-
-    install_amazon_linux_ami_deps || return 1
-
-    if ! __check_command_exists git; then
-        __yum_install_noinput git || return 1
-    fi
-
-    __git_clone_and_checkout || return 1
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        __PACKAGES=""
-        __PIP_PACKAGES=""
-
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            __check_pip_allowed "You need to allow pip based installations (-P) in order to install apache-libcloud"
-            __PACKAGES="${__PACKAGES} python27-pip"
-            __PIP_PACKAGES="${__PIP_PACKAGES} apache-libcloud>=$_LIBCLOUD_MIN_VERSION"
-        fi
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # We're on the master branch, install whichever tornado is on the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                __PACKAGES="${__PACKAGES} ${pkg_append}-tornado"
-            fi
-        fi
-
-        if [ "${__PACKAGES}" != "" ]; then
-            # shellcheck disable=SC2086
-            __yum_install_noinput ${__PACKAGES} || return 1
-        fi
-
-        if [ "${__PIP_PACKAGES}" != "" ]; then
-            # shellcheck disable=SC2086
-            ${PIP_EXE} install ${__PIP_PACKAGES} || return 1
-        fi
-    else
-        __PACKAGES="python27-pip python27-setuptools python27-devel gcc"
-            # shellcheck disable=SC2086
-        __yum_install_noinput ${__PACKAGES} || return 1
-    fi
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
-
-    return 0
-}
-
+# Support for Amazon Linux 2
 install_amazon_linux_ami_2_git_deps() {
     if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
         yum -y install ca-certificates || return 1
     fi
 
+    if [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
+    fi
+
     install_amazon_linux_ami_2_deps || return 1
 
-    if [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-        PY_PKG_VER=2
-        PIP_EXE='/bin/pip'
-    else
-        PY_PKG_VER=3
-        PIP_EXE='/bin/pip3'
-    fi
+    PY_PKG_VER=3
+    PIP_EXE='/bin/pip3'
     __PACKAGES="python${PY_PKG_VER}-pip"
 
     if ! __check_command_exists "${PIP_EXE}"; then
@@ -6303,63 +5580,17 @@ install_amazon_linux_ami_2_git_deps() {
         __yum_install_noinput git || return 1
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
+    __PACKAGES="python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools python${PY_PKG_VER}-devel gcc sudo"
 
-        __PACKAGES=""
-        __PIP_PACKAGES=""
-
-        if [ "$_INSTALL_CLOUD" -eq "$BS_TRUE" ]; then
-            __check_pip_allowed "You need to allow pip based installations (-P) in order to install apache-libcloud"
-            if [ "$PARSED_VERSION" -eq "2" ]; then
-                if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq "3" ]; then
-                    __PACKAGES="${__PACKAGES} python3-pip"
-                    __PIP_PACKAGES="${__PIP_PACKAGES} tornado<$_TORNADO_MAX_PY3_VERSION"
-                else
-                    __PACKAGES="${__PACKAGES} python2-pip"
-                fi
-            else
-                __PACKAGES="${__PACKAGES} python27-pip"
-            fi
-            __PIP_PACKAGES="${__PIP_PACKAGES} apache-libcloud>=$_LIBCLOUD_MIN_VERSION"
-        fi
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # We're on the master branch, install whichever tornado is on the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq "3" ]; then
-                    __PIP_PACKAGES="${__PIP_PACKAGES} tornado<$_TORNADO_MAX_PY3_VERSION"
-                else
-                    __PACKAGES="${__PACKAGES} ${pkg_append}${PY_PKG_VER}-tornado"
-                fi
-            fi
-        fi
-
-        if [ "${__PIP_PACKAGES}" != "" ]; then
-            __check_pip_allowed "You need to allow pip based installations (-P) in order to install ${__PIP_PACKAGES}"
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-pip"
-        fi
-
-        if [ "${__PACKAGES}" != "" ]; then
-            # shellcheck disable=SC2086
-            __yum_install_noinput ${__PACKAGES} || return 1
-        fi
-
-        if [ "${__PIP_PACKAGES}" != "" ]; then
-            # shellcheck disable=SC2086
-            ${PIP_EXE} install ${__PIP_PACKAGES} || return 1
-        fi
-    else
-        __PACKAGES="python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools python${PY_PKG_VER}-devel gcc"
-        # shellcheck disable=SC2086
-        __yum_install_noinput ${__PACKAGES} || return 1
-    fi
+    # shellcheck disable=SC2086
+    __yum_install_noinput ${__PACKAGES} || return 1
 
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -6367,31 +5598,16 @@ install_amazon_linux_ami_2_git_deps() {
 }
 
 install_amazon_linux_ami_2_deps() {
-    # Shim to figure out if we're using old (rhel) or new (aws) rpms.
-    _USEAWS=$BS_FALSE
-    pkg_append="python"
-
-    if [ "$ITYPE" = "stable" ]; then
-        repo_rev="$STABLE_REV"
-    else
-        repo_rev="latest"
-    fi
-
-    if echo $repo_rev | grep -E -q '^archive'; then
-        year=$(echo "$repo_rev" | cut -d '/' -f 2 | cut -c1-4)
-    else
-        year=$(echo "$repo_rev" | cut -c1-4)
-    fi
-
-    if echo "$repo_rev" | grep -E -q '^(latest|2016\.11)$' || \
-            [ "$year" -gt 2016 ]; then
-       _USEAWS=$BS_TRUE
-       pkg_append="python"
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
     # We need to install yum-utils before doing anything else when installing on
     # Amazon Linux ECS-optimized images. See issue #974.
-    __yum_install_noinput yum-utils
+    __PACKAGES="yum-utils sudo"
+
+    # shellcheck disable=SC2086
+    __yum_install_noinput ${__PACKAGES}
 
     # Do upgrade early
     if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
@@ -6399,56 +5615,69 @@ install_amazon_linux_ami_2_deps() {
     fi
 
     if [ $_DISABLE_REPOS -eq $BS_FALSE ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
-        __REPO_FILENAME="salt.repo"
-        __PY_VERSION_REPO="yum"
-        PY_PKG_VER=""
-        repo_label="saltstack-repo"
-        repo_name="SaltStack repo for Amazon Linux 2"
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            __REPO_FILENAME="salt.repo"
-            __PY_VERSION_REPO="py3"
-            PY_PKG_VER=3
-            repo_label="saltstack-py3-repo"
-            repo_name="SaltStack Python 3 repo for Amazon Linux 2"
+        if [ ! -s "${YUM_REPO_FILE}" ]; then
+            ## Amazon Linux yum (v3) doesn't support config-manager
+            ## FETCH_URL="https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
+            ## __fetch_url "${YUM_REPO_FILE}" "${FETCH_URL}"
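+            # Repo selection sketch: a bare "3006"/"3007" rev pins the branch-latest
+            # LTS/STS repo, a full "X.Y" rev pins that minor release, and anything
+            # else falls back to the latest repo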
+            # shellcheck disable=SC2129
+            if [ "$STABLE_REV" != "latest" ]; then
+                # 3006.x is default, and latest for 3006.x branch
+                if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                    # latest version for branch 3006 | 3007
+                    REPO_REV_MAJOR=$(echo "$STABLE_REV" | cut -d '.' -f 1)
+                    if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                        # Enable the Salt 3007 STS repo
+                        echo "[salt-repo-3007-sts]" > "${YUM_REPO_FILE}"
+                        echo "name=Salt Repo for Salt v3007 STS" >> "${YUM_REPO_FILE}"
+                        echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                        echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                        echo "priority=10" >> "${YUM_REPO_FILE}"
+                        echo "enabled=1" >> "${YUM_REPO_FILE}"
+                        echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                        echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                        echo "exclude=*3006* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                        echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                    else
+                        # Salt 3006 repo
+                        echo "[salt-repo-3006-lts]" > "${YUM_REPO_FILE}"
+                        echo "name=Salt Repo for Salt v3006 LTS" >> "${YUM_REPO_FILE}"
+                        echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                        echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                        echo "priority=10" >> "${YUM_REPO_FILE}"
+                        echo "enabled=1" >> "${YUM_REPO_FILE}"
+                        echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                        echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                        echo "exclude=*3007* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                        echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                    fi
+                elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                    # using minor version
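+                    # e.g. a STABLE_REV of "3006-8" is normalized to "3006.8" here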
+                    STABLE_REV_DOT=$(echo "$STABLE_REV" | sed 's/-/\./')
+                    echo "[salt-repo-${STABLE_REV_DOT}-lts]" > "${YUM_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v${STABLE_REV_DOT} LTS" >> "${YUM_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                    echo "priority=10" >> "${YUM_REPO_FILE}"
+                    echo "enabled=1" >> "${YUM_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                fi
+            else
+                # Enable the Salt LATEST repo
+                echo "[salt-repo-latest]" > "${YUM_REPO_FILE}"
+                echo "name=Salt Repo for Salt LATEST release" >> "${YUM_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                echo "priority=10" >> "${YUM_REPO_FILE}"
+                echo "enabled=1" >> "${YUM_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+            fi
+            yum clean expire-cache || return 1
+            yum makecache || return 1
         fi
-
-        base_url="$HTTP_VAL://${_REPO_URL}/${__PY_VERSION_REPO}/amazon/2/\$basearch/$repo_rev/"
-        gpg_key="${base_url}SALTSTACK-GPG-KEY.pub,${base_url}base/RPM-GPG-KEY-CentOS-7"
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            gpg_key="${base_url}SALTSTACK-GPG-KEY.pub"
-        fi
-
-        # This should prob be refactored to use __install_saltstack_rhel_repository()
-        # With args passed in to do the right thing.  Reformatted to be more like the
-        # amazon linux yum file.
-        if [ ! -s "/etc/yum.repos.d/${__REPO_FILENAME}" ]; then
-          cat <<_eof > "/etc/yum.repos.d/${__REPO_FILENAME}"
-[$repo_label]
-name=$repo_name
-failovermethod=priority
-priority=10
-gpgcheck=1
-gpgkey=$gpg_key
-baseurl=$base_url
-_eof
-        fi
-
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        # Package python-ordereddict-1.1-2.el6.noarch is obsoleted by python26-2.6.9-2.88.amzn1.x86_64
-        # which is already installed
-        if [ -n "${PY_PKG_VER}" ] && [ "${PY_PKG_VER}" -eq 3 ]; then
-            __PACKAGES="${pkg_append}${PY_PKG_VER}-m2crypto ${pkg_append}${PY_PKG_VER}-pyyaml"
-        else
-            __PACKAGES="m2crypto PyYAML ${pkg_append}-futures"
-        fi
-
-        __PACKAGES="${__PACKAGES} ${pkg_append}${PY_PKG_VER}-crypto ${pkg_append}${PY_PKG_VER}-jinja2 procps-ng"
-        __PACKAGES="${__PACKAGES} ${pkg_append}${PY_PKG_VER}-msgpack ${pkg_append}${PY_PKG_VER}-requests ${pkg_append}${PY_PKG_VER}-zmq"
-
-        # shellcheck disable=SC2086
-        __yum_install_noinput ${__PACKAGES} || return 1
     fi
 
     if [ "${_EXTRA_PACKAGES}" != "" ]; then
@@ -6459,25 +5688,16 @@ _eof
 }
 
 install_amazon_linux_ami_2_onedir_deps() {
-    # Shim to figure out if we're using old (rhel) or new (aws) rpms.
-    _USEAWS=$BS_FALSE
-    pkg_append="python"
-
-    if [ "$ITYPE" = "onedir" ]; then
-        repo_rev="$ONEDIR_REV"
-    else
-        repo_rev="latest"
-    fi
-
-    if echo $repo_rev | grep -E -q '^archive'; then
-        year=$(echo "$repo_rev" | cut -d '/' -f 2 | cut -c1-4)
-    else
-        year=$(echo "$repo_rev" | cut -c1-4)
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
     # We need to install yum-utils before doing anything else when installing on
     # Amazon Linux ECS-optimized images. See issue #974.
-    __yum_install_noinput yum-utils
+    __PACKAGES="yum-utils chkconfig procps-ng findutils sudo"
+
+    # shellcheck disable=SC2086
+    __yum_install_noinput ${__PACKAGES}
 
     # Do upgrade early
     if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
@@ -6485,64 +5705,69 @@ install_amazon_linux_ami_2_onedir_deps() {
     fi
 
     if [ $_DISABLE_REPOS -eq $BS_FALSE ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
-        __REPO_FILENAME="salt.repo"
-        __PY_VERSION_REPO="yum"
-        PY_PKG_VER=""
-        repo_label="saltstack-repo"
-        repo_name="SaltStack repo for Amazon Linux 2"
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            __REPO_FILENAME="salt.repo"
-            __PY_VERSION_REPO="py3"
-            PY_PKG_VER=3
-            repo_label="saltstack-py3-repo"
-            repo_name="SaltStack Python 3 repo for Amazon Linux 2"
+        if [ ! -s "${YUM_REPO_FILE}" ]; then
+            ## Amazon Linux yum (v3) doesn't support config-manager
+            ## FETCH_URL="https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
+            ## __fetch_url "${YUM_REPO_FILE}" "${FETCH_URL}"
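+            # (same repo-selection logic as install_amazon_linux_ami_2_deps above)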
+            # shellcheck disable=SC2129
+            if [ "$ONEDIR_REV" != "latest" ]; then
+                # 3006.x is default, and latest for 3006.x branch
+                if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                    # latest version for branch 3006 | 3007
+                    REPO_REV_MAJOR=$(echo "$ONEDIR_REV" | cut -d '.' -f 1)
+                    if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                        # Enable the Salt 3007 STS repo
+                        echo "[salt-repo-3007-sts]" > "${YUM_REPO_FILE}"
+                        echo "name=Salt Repo for Salt v3007 STS" >> "${YUM_REPO_FILE}"
+                        echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                        echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                        echo "priority=10" >> "${YUM_REPO_FILE}"
+                        echo "enabled=1" >> "${YUM_REPO_FILE}"
+                        echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                        echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                        echo "exclude=*3006* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                        echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                    else
+                        # Salt 3006 repo
+                        echo "[salt-repo-3006-lts]" > "${YUM_REPO_FILE}"
+                        echo "name=Salt Repo for Salt v3006 LTS" >> "${YUM_REPO_FILE}"
+                        echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                        echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                        echo "priority=10" >> "${YUM_REPO_FILE}"
+                        echo "enabled=1" >> "${YUM_REPO_FILE}"
+                        echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                        echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                        echo "exclude=*3007* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                        echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                    fi
+                elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                    # using minor version
+                    ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+                    echo "[salt-repo-${ONEDIR_REV_DOT}-lts]" > "${YUM_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v${ONEDIR_REV_DOT} LTS" >> "${YUM_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                    echo "priority=10" >> "${YUM_REPO_FILE}"
+                    echo "enabled=1" >> "${YUM_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                fi
+            else
+                # Enable the Salt LATEST repo
+                echo "[salt-repo-latest]" > "${YUM_REPO_FILE}"
+                echo "name=Salt Repo for Salt LATEST release" >> "${YUM_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                echo "priority=10" >> "${YUM_REPO_FILE}"
+                echo "enabled=1" >> "${YUM_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+            fi
+            yum clean expire-cache || return 1
+            yum makecache || return 1
         fi
-
-        base_url="$HTTP_VAL://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/amazon/2/\$basearch/$repo_rev/"
-        if [ "${ONEDIR_REV}" = "nightly" ] ; then
-            base_url="$HTTP_VAL://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/amazon/2/\$basearch/"
-        fi
-
-        if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ] || [ "${ONEDIR_REV}" = "nightly" ]; then
-          gpg_key="${base_url}SALTSTACK-GPG-KEY.pub,${base_url}base/RPM-GPG-KEY-CentOS-7"
-          if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            gpg_key="${base_url}SALTSTACK-GPG-KEY.pub"
-          fi
-        else
-          gpg_key="${base_url}SALT-PROJECT-GPG-PUBKEY-2023.pub"
-        fi
-
-        # This should prob be refactored to use __install_saltstack_rhel_repository()
-        # With args passed in to do the right thing.  Reformatted to be more like the
-        # amazon linux yum file.
-        if [ ! -s "/etc/yum.repos.d/${__REPO_FILENAME}" ]; then
-          cat <<_eof > "/etc/yum.repos.d/${__REPO_FILENAME}"
-[$repo_label]
-name=$repo_name
-failovermethod=priority
-priority=10
-gpgcheck=1
-gpgkey=$gpg_key
-baseurl=$base_url
-_eof
-        fi
-
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        # Package python-ordereddict-1.1-2.el6.noarch is obsoleted by python26-2.6.9-2.88.amzn1.x86_64
-        # which is already installed
-        if [ -n "${PY_PKG_VER}" ] && [ "${PY_PKG_VER}" -eq 3 ]; then
-            __PACKAGES="${pkg_append}${PY_PKG_VER}-m2crypto ${pkg_append}${PY_PKG_VER}-pyyaml"
-        else
-            __PACKAGES="m2crypto PyYAML ${pkg_append}-futures"
-        fi
-
-        __PACKAGES="${__PACKAGES} ${pkg_append}${PY_PKG_VER}-crypto ${pkg_append}${PY_PKG_VER}-jinja2 procps-ng"
-        __PACKAGES="${__PACKAGES} ${pkg_append}${PY_PKG_VER}-msgpack ${pkg_append}${PY_PKG_VER}-requests ${pkg_append}${PY_PKG_VER}-zmq"
-
-        # shellcheck disable=SC2086
-        __yum_install_noinput ${__PACKAGES} || return 1
     fi
 
     if [ "${_EXTRA_PACKAGES}" != "" ]; then
@@ -6552,41 +5777,6 @@ _eof
     fi
 }
 
-install_amazon_linux_ami_stable() {
-    install_centos_stable || return 1
-    return 0
-}
-
-install_amazon_linux_ami_stable_post() {
-    install_centos_stable_post || return 1
-    return 0
-}
-
-install_amazon_linux_ami_restart_daemons() {
-    install_centos_restart_daemons || return 1
-    return 0
-}
-
-install_amazon_linux_ami_git() {
-    install_centos_git || return 1
-    return 0
-}
-
-install_amazon_linux_ami_git_post() {
-    install_centos_git_post || return 1
-    return 0
-}
-
-install_amazon_linux_ami_testing() {
-    install_centos_testing || return 1
-    return 0
-}
-
-install_amazon_linux_ami_testing_post() {
-    install_centos_testing_post || return 1
-    return 0
-}
-
 install_amazon_linux_ami_2_stable() {
     install_centos_stable || return 1
     return 0
@@ -6637,6 +5827,188 @@ install_amazon_linux_ami_2_onedir_post() {
     return 0
 }
 
+# Support for Amazon Linux 2023
+# The following code handles only 2023 for now; it needs adjustment to also allow 2024, 2025, etc.
+install_amazon_linux_ami_2023_git_deps() {
+    if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
+        yum -y install ca-certificates || return 1
+    fi
+
+    install_amazon_linux_ami_2023_onedir_deps || return 1
+
+    PY_PKG_VER=3
+    PIP_EXE='/bin/pip3'
+    __PACKAGES="python${PY_PKG_VER}-pip"
+
+    if ! __check_command_exists "${PIP_EXE}"; then
+        # shellcheck disable=SC2086
+        __yum_install_noinput ${__PACKAGES} || return 1
+    fi
+
+    if ! __check_command_exists git; then
+        __yum_install_noinput git || return 1
+    fi
+
+    # shellcheck disable=SC2119
+    __git_clone_and_checkout || return 1
+
+    __PACKAGES="python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools python${PY_PKG_VER}-devel gcc sudo"
+
+    # shellcheck disable=SC2086
+    __yum_install_noinput ${__PACKAGES} || return 1
+
+    # Let's trigger config_salt()
+    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
+        CONFIG_SALT_FUNC="config_salt"
+    fi
+
+    return 0
+}
+
+install_amazon_linux_ami_2023_deps() {
+
+    # Set ONEDIR_REV to STABLE_REV
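+    # (stable installs reuse the onedir repo layout, so the onedir deps handle the rest)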
+    ONEDIR_REV=${STABLE_REV}
+    install_amazon_linux_ami_2023_onedir_deps || return 1
+}
+
+install_amazon_linux_ami_2023_onedir_deps() {
+
+    # We need to install yum-utils before doing anything else when installing on
+    # Amazon Linux ECS-optimized images. See issue #974.
+    __PACKAGES="yum-utils chkconfig procps-ng findutils sudo"
+
+    # shellcheck disable=SC2086
+    __yum_install_noinput ${__PACKAGES}
+
+    # Do upgrade early
+    if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
+        yum -y update || return 1
+    fi
+
+    if [ $_DISABLE_REPOS -eq $BS_FALSE ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
+        if [ ! -s "${YUM_REPO_FILE}" ]; then
+            ## Amazon Linux yum (v3) doesn't support config-manager
+            ## FETCH_URL="https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
+            ## __fetch_url "${YUM_REPO_FILE}" "${FETCH_URL}"
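+            # (same repo-selection logic as install_amazon_linux_ami_2_deps above)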
+            # shellcheck disable=SC2129
+            if [ "$ONEDIR_REV" != "latest" ]; then
+                # 3006.x is default, and latest for 3006.x branch
+                if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                    # latest version for branch 3006 | 3007
+                    REPO_REV_MAJOR=$(echo "$ONEDIR_REV" | cut -d '.' -f 1)
+                    if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                        # Enable the Salt 3007 STS repo
+                        echo "[salt-repo-3007-sts]" > "${YUM_REPO_FILE}"
+                        echo "name=Salt Repo for Salt v3007 STS" >> "${YUM_REPO_FILE}"
+                        echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                        echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                        echo "priority=10" >> "${YUM_REPO_FILE}"
+                        echo "enabled=1" >> "${YUM_REPO_FILE}"
+                        echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                        echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                        echo "exclude=*3006* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                        echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                    else
+                        # Salt 3006 repo
+                        echo "[salt-repo-3006-lts]" > "${YUM_REPO_FILE}"
+                        echo "name=Salt Repo for Salt v3006 LTS" >> "${YUM_REPO_FILE}"
+                        echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                        echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                        echo "priority=10" >> "${YUM_REPO_FILE}"
+                        echo "enabled=1" >> "${YUM_REPO_FILE}"
+                        echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                        echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                        echo "exclude=*3007* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                        echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                    fi
+                elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                    # using minor version
+                    ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+                    echo "[salt-repo-${ONEDIR_REV_DOT}-lts]" > "${YUM_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v${ONEDIR_REV_DOT} LTS" >> "${YUM_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                    echo "priority=10" >> "${YUM_REPO_FILE}"
+                    echo "enabled=1" >> "${YUM_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                fi
+            else
+                # Enable the Salt LATEST repo
+                echo "[salt-repo-latest]" > "${YUM_REPO_FILE}"
+                echo "name=Salt Repo for Salt LATEST release" >> "${YUM_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                echo "priority=10" >> "${YUM_REPO_FILE}"
+                echo "enabled=1" >> "${YUM_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+            fi
+            yum clean expire-cache || return 1
+            yum makecache || return 1
+        fi
+    fi
+
+    if [ "${_EXTRA_PACKAGES}" != "" ]; then
+        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
+        # shellcheck disable=SC2086
+        __yum_install_noinput ${_EXTRA_PACKAGES} || return 1
+    fi
+}
+
+install_amazon_linux_ami_2023_stable() {
+    install_centos_stable || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_stable_post() {
+    install_centos_stable_post || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_restart_daemons() {
+    install_centos_restart_daemons || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_git() {
+    install_centos_git || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_git_post() {
+    install_centos_git_post || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_testing() {
+    install_centos_testing || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_testing_post() {
+    install_centos_testing_post || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_check_services() {
+    install_centos_check_services || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_onedir() {
+    install_centos_stable || return 1
+    return 0
+}
+
+install_amazon_linux_ami_2023_onedir_post() {
+    install_centos_stable_post || return 1
+    return 0
+}
+
 #
 #   Ended Amazon Linux AMI Install Functions
 #
@@ -6664,7 +6036,8 @@ install_arch_linux_stable_deps() {
     fi
 
     if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-        PY_PKG_VER=2
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     else
         PY_PKG_VER=""
     fi
@@ -6672,6 +6045,7 @@ install_arch_linux_stable_deps() {
     # YAML module is used for generating custom master/minion configs
     # shellcheck disable=SC2086
     pacman -Su --noconfirm --needed python${PY_PKG_VER}-yaml
+    # shellcheck disable=SC2086
+    pacman -Su --noconfirm --needed python${PY_PKG_VER}-tornado
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
         # shellcheck disable=SC2086
@@ -6693,35 +6067,24 @@ install_arch_linux_git_deps() {
         pacman -Sy --noconfirm --needed git  || return 1
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        pacman -R --noconfirm python2-distribute
-        pacman -Su --noconfirm --needed python2-crypto python2-setuptools python2-jinja \
-            python2-m2crypto python2-markupsafe python2-msgpack python2-psutil \
-            python2-pyzmq zeromq python2-requests python2-systemd || return 1
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # We're on the master branch, install whichever tornado is on the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                pacman -Su --noconfirm --needed python2-tornado
-            fi
-        fi
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     else
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-            PY_PKG_VER=2
-        else
-            PY_PKG_VER=""
-        fi
-        __PACKAGES="python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
-        # shellcheck disable=SC2086
-        pacman -Su --noconfirm --needed ${__PACKAGES}
+        PY_PKG_VER=""
     fi
 
+    __PACKAGES="python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
+
+    # shellcheck disable=SC2086
+    pacman -Su --noconfirm --needed ${__PACKAGES}
+
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -6749,19 +6112,16 @@ install_arch_linux_stable() {
 }
 
 install_arch_linux_git() {
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
+    fi
 
-    _POST_NEON_PIP_INSTALL_ARGS="${_POST_NEON_PIP_INSTALL_ARGS} --use-pep517"
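+    # Assumption: forcing PEP 517 builds keeps pip off the legacy setup.py path with
+    # Arch's current pip/setuptools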
+    _PIP_INSTALL_ARGS="${_PIP_INSTALL_ARGS} --use-pep517"
     _PIP_DOWNLOAD_ARGS="${_PIP_DOWNLOAD_ARGS} --use-pep517"
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
 
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        python2 setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install || return 1
-    else
-        python2 setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1
-    fi
+    __install_salt_from_repo "${_PY_EXE}" || return 1
+
     return 0
 }
 
@@ -6785,7 +6145,7 @@ install_arch_linux_post() {
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
 
-        if [ -f /usr/bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             # Using systemd
             /usr/bin/systemctl is-enabled salt-$fname.service > /dev/null 2>&1 || (
                 /usr/bin/systemctl preset salt-$fname.service > /dev/null 2>&1 &&
@@ -6816,7 +6176,7 @@ install_arch_linux_git_post() {
           _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm"
         fi
 
-        if [ -f /usr/bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service"
 
             # Skip salt-api since the service should be opt-in and not necessarily started on boot
@@ -6838,7 +6198,7 @@ install_arch_linux_git_post() {
 }
 
 install_arch_linux_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
 
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
@@ -6849,7 +6209,7 @@ install_arch_linux_restart_daemons() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /usr/bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             /usr/bin/systemctl stop salt-$fname.service > /dev/null 2>&1
             /usr/bin/systemctl start salt-$fname.service && continue
             echodebug "Failed to start salt-$fname using systemd"
@@ -6865,7 +6225,7 @@ install_arch_linux_restart_daemons() {
 }
 
 install_arch_check_services() {
-    if [ ! -f /usr/bin/systemctl ]; then
+    if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
         # Not running systemd!? Don't check!
         return 0
     fi
@@ -6906,58 +6266,138 @@ install_arch_linux_onedir_post() {
 #   Photon OS Install Functions
 #
 
-__install_saltstack_photon_onedir_repository() {
-    if [ "$ITYPE" = "stable" ]; then
-        REPO_REV="$ONEDIR_REV"
+#---  FUNCTION  -------------------------------------------------------------------------------------------------------
+#          NAME:  __get_packagesite_onedir_latest
+#   DESCRIPTION:  Set _GENERIC_PKG_VERSION to the latest released RPM version, or to
+#                 the latest release for the major version passed as the first argument
+#----------------------------------------------------------------------------------------------------------------------
+__get_packagesite_onedir_latest() {
+
+    echodebug "Find latest rpm release from repository"
+
+    # get dir listing from url, sort and pick highest
+    generic_versions_tmpdir=$(mktemp -d)
+    curr_pwd=$(pwd)
+    cd "${generic_versions_tmpdir}" || return 1
+
+    # leverage the windows directory listing, since Windows and Linux releases are versioned together
+    wget -q -r -np -nH --exclude-directories=onedir,relenv,macos -x -l 1 "https://${_REPO_URL}/saltproject-generic/windows/"
+    if [ "$#" -gt 0 ] && [ -n "$1" ]; then
+        MAJOR_VER="$1"
+        # shellcheck disable=SC2010
+        _GENERIC_PKG_VERSION=$(ls artifactory/saltproject-generic/windows/ | grep -v 'index.html' | sort -V -u | grep -E "$MAJOR_VER" | tail -n 1)
     else
-        REPO_REV="latest"
+        # shellcheck disable=SC2010
+        _GENERIC_PKG_VERSION=$(ls artifactory/saltproject-generic/windows/ | grep -v 'index.html' | sort -V -u | tail -n 1)
+    fi
+    cd "${curr_pwd}" || return 1
+    rm -fR "${generic_versions_tmpdir}"
+
+    echodebug "latest rpm release from repository found ${_GENERIC_PKG_VERSION}"
+
+}
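+# Usage sketch (illustrative), as called from install_photon_onedir below:
+#   __get_packagesite_onedir_latest "3006"
+#   echodebug "resolved Salt version: ${_GENERIC_PKG_VERSION}"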
+
+
+__install_saltstack_photon_onedir_repository() {
+    echodebug "__install_saltstack_photon_onedir_repository() entry"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    __PY_VERSION_REPO="yum"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
-    fi
-
-    REPO_FILE="/etc/yum.repos.d/salt.repo"
-
-    if [ ! -s "$REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
-        FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/photon/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}"
-        if [ "${ONEDIR_REV}" = "nightly" ] ; then
-            FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/photon/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/"
+    if [ ! -s "$YUM_REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
+        ## Photon tdnf doesn't support config-manager
+        ## FETCH_URL="https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
+        ## __fetch_url "${YUM_REPO_FILE}" "${FETCH_URL}"
+        # shellcheck disable=SC2129
+        if [ "$ONEDIR_REV" != "latest" ]; then
+            # a bare major version (3006 is the default) selects the latest release on that branch
+            if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                # latest release for the 3006 or 3007 branch
+                REPO_REV_MAJOR=$(echo "$ONEDIR_REV" | cut -d '.' -f 1)
+                if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                    # Enable the Salt 3007 STS repo
+                    ## tdnf config-manager --set-disable salt-repo-*
+                    ## tdnf config-manager --set-enabled salt-repo-3007-sts
+                    echo "[salt-repo-3007-sts]" > "${YUM_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v3007 STS" >> "${YUM_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                    echo "priority=10" >> "${YUM_REPO_FILE}"
+                    echo "enabled=1" >> "${YUM_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                    echo "exclude=*3006* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                else
+                    # Salt 3006 repo
+                    echo "[salt-repo-3006-lts]" > "${YUM_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v3006 LTS" >> "${YUM_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                    echo "priority=10" >> "${YUM_REPO_FILE}"
+                    echo "enabled=1" >> "${YUM_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                    echo "exclude=*3007* *3008* *3009* *3010*" >> "${YUM_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+                fi
+            elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                # using a pinned minor version, e.g. 3006.8
+                ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+                echo "[salt-repo-${ONEDIR_REV_DOT}-lts]" > "${YUM_REPO_FILE}"
+                echo "name=Salt Repo for Salt v${ONEDIR_REV_DOT} LTS" >> "${YUM_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+                echo "priority=10" >> "${YUM_REPO_FILE}"
+                echo "enabled=1" >> "${YUM_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+                echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
+            fi
+        else
+            # Enable the Salt LATEST repo
+            ## tdnf config-manager --set-disable salt-repo-*
+            ## tdnf config-manager --set-enabled salt-repo-latest
+            echo "[salt-repo-latest]" > "${YUM_REPO_FILE}"
+            echo "name=Salt Repo for Salt LATEST release" >> "${YUM_REPO_FILE}"
+            echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${YUM_REPO_FILE}"
+            echo "skip_if_unavailable=True" >> "${YUM_REPO_FILE}"
+            echo "priority=10" >> "${YUM_REPO_FILE}"
+            echo "enabled=1" >> "${YUM_REPO_FILE}"
+            echo "enabled_metadata=1" >> "${YUM_REPO_FILE}"
+            echo "gpgcheck=1" >> "${YUM_REPO_FILE}"
+            echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${YUM_REPO_FILE}"
         fi
-
-        __fetch_url "${REPO_FILE}" "${FETCH_URL}.repo"
-
-        GPG_KEY="SALT-PROJECT-GPG-PUBKEY-2023.pub"
-
-        __rpm_import_gpg "${FETCH_URL}/${GPG_KEY}" || return 1
-
         tdnf makecache || return 1
-    elif [ "$REPO_REV" != "latest" ]; then
+    elif [ "$ONEDIR_REV" != "latest" ]; then
         echowarn "salt.repo already exists, ignoring salt version argument."
-        echowarn "Use -F (forced overwrite) to install $REPO_REV."
+        echowarn "Use -F (forced overwrite) to install $ONEDIR_REV."
     fi
 
     return 0
 }
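+# As an illustration, running with ONEDIR_REV=3006.8 leaves a ${YUM_REPO_FILE}
+# along the lines of:
+#
+#   [salt-repo-3006.8-lts]
+#   name=Salt Repo for Salt v3006.8 LTS
+#   baseurl=https://${_REPO_URL}/saltproject-rpm/
+#   skip_if_unavailable=True
+#   priority=10
+#   enabled=1
+#   enabled_metadata=1
+#   gpgcheck=1
+#   gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public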
 
 install_photon_deps() {
+    echodebug "install_photon_deps() entry"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
+    fi
+
     if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
         tdnf -y update || return 1
     fi
 
     __PACKAGES="${__PACKAGES:=}"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -lt 3 ]; then
-        echoerror "There are no Python 2 stable packages for Fedora, only Py3 packages"
-        return 1
-    fi
-
     PY_PKG_VER=3
 
     __PACKAGES="${__PACKAGES} libyaml procps-ng python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2"
     __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests python${PY_PKG_VER}-zmq"
     __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-pip python${PY_PKG_VER}-m2crypto python${PY_PKG_VER}-pyyaml"
-    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd"
+    __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd sudo shadow"
+
     if [ "${_EXTRA_PACKAGES}" != "" ]; then
         echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
     fi
@@ -6969,6 +6409,8 @@ install_photon_deps() {
 }
 
 install_photon_stable_post() {
+    echodebug "install_photon_stable_post() entry"
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -6985,78 +6427,64 @@ install_photon_stable_post() {
 }
 
 install_photon_git_deps() {
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        # Packages are named python3-<whatever>
-        PY_PKG_VER=3
-    else
-        PY_PKG_VER=2
+    echodebug "install_photon_git_deps() entry"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
+    # Packages are named python3-<whatever>
+    PY_PKG_VER=3
+
     __PACKAGES=""
     if ! __check_command_exists ps; then
         __PACKAGES="${__PACKAGES} procps-ng"
     fi
+
     if ! __check_command_exists git; then
         __PACKAGES="${__PACKAGES} git"
     fi
 
+    if ! __check_command_exists sudo; then
+        __PACKAGES="${__PACKAGES} sudo"
+    fi
+
+    if ! __check_command_exists usermod; then
+        __PACKAGES="${__PACKAGES} shadow"
+    fi
+
     if [ -n "${__PACKAGES}" ]; then
         # shellcheck disable=SC2086
         __tdnf_install_noinput ${__PACKAGES} || return 1
         __PACKAGES=""
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
+    __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc glibc-devel linux-devel.x86_64 cython${PY_PKG_VER}"
 
-        if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
-            __PACKAGES="${__PACKAGES} ca-certificates"
-        fi
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud python${PY_PKG_VER}-netaddr"
-        fi
+    echodebug "install_photon_git_deps() distro major version, ${DISTRO_MAJOR_VERSION}"
 
-        install_photon_deps || return 1
-
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-            if __check_command_exists python3; then
-                __python="python3"
-            fi
-        elif [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-            if __check_command_exists python2; then
-                __python="python2"
-            fi
-        else
-            if ! __check_command_exists python; then
-                echoerror "Unable to find a python binary?!"
-                return 1
-            fi
-            # Let's hope it's the right one
-            __python="python"
-        fi
-
-        grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" | while IFS='
-    '         read -r dep; do
-                echodebug "Running '${__python}' -m pip install '${dep}'"
-                "${__python}" -m pip install "${dep}" || return 1
-            done
-    else
-        __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc glibc-devel linux-devel.x86_64"
-        # shellcheck disable=SC2086
-        __tdnf_install_noinput ${__PACKAGES} || return 1
+    ## install the packaged python3-tornado only on Photon releases before 5
+    if [ "${DISTRO_MAJOR_VERSION}" -lt 5 ]; then
+        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado"
     fi
 
+    # shellcheck disable=SC2086
+    __tdnf_install_noinput ${__PACKAGES} || return 1
+
     if [ "${DISTRO_MAJOR_VERSION}" -gt 3 ]; then
       # Need newer version of setuptools on Photon
-      _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION}"
-      echodebug "Running '${_PY_EXE} -m pip --upgrade install ${_setuptools_dep}'"
+      _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION},<${_MAXIMUM_SETUPTOOLS_VERSION}"
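+      ## e.g. with hypothetical bounds of 65.6.3 and 69, this expands to the
+      ## pip requirement "setuptools>=65.6.3,<69"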
+      echodebug "Running '${_PY_EXE} -m pip install --upgrade ${_setuptools_dep}'"
       ${_PY_EXE} -m pip install --upgrade "${_setuptools_dep}"
     fi
 
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -7064,13 +6492,18 @@ install_photon_git_deps() {
 }
 
 install_photon_git() {
+    echodebug "install_photon_git() entry"
+
     if [ "${_PY_EXE}" != "" ]; then
         _PYEXE=${_PY_EXE}
         echoinfo "Using the following python version: ${_PY_EXE} to install salt"
     else
-        _PYEXE='python2'
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
+    install_photon_git_deps || return 1
+
     if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
         ${_PYEXE} setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
     else
@@ -7080,6 +6513,8 @@ install_photon_git() {
 }
 
 install_photon_git_post() {
+    echodebug "install_photon_git_post() entry"
+
     for fname in api master minion syndic; do
         # Skip if not meant to be installed
         [ $fname = "api" ] && \
@@ -7111,7 +6546,9 @@ install_photon_git_post() {
 }
 
 install_photon_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
+    echodebug "install_photon_restart_daemons() entry"
+
 
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
@@ -7133,6 +6570,8 @@ install_photon_restart_daemons() {
 }
 
 install_photon_check_services() {
+    echodebug "install_photon_check_services() entry"
+
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
         [ $fname = "api" ] && continue
@@ -7149,6 +6588,8 @@ install_photon_check_services() {
 }
 
 install_photon_onedir_deps() {
+    echodebug "install_photon_onedir_deps() entry"
 
     if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
         tdnf -y update || return 1
@@ -7165,13 +6606,13 @@ install_photon_onedir_deps() {
     fi
 
     # If -R was passed, we need to configure custom repo url with rsync-ed packages
-    # Which is still handled in __install_saltstack_rhel_repository. This call has
-    # its own check in case -r was passed without -R.
+    # Which was handled in __install_saltstack_rhel_repository, but that handled old-stable, which is for
+    # releases which are End-Of-Life. This call has its own check in case -r was passed without -R.
     if [ "$_CUSTOM_REPO_URL" != "null" ]; then
         __install_saltstack_photon_onedir_repository || return 1
     fi
 
-    __PACKAGES="procps-ng"
+    __PACKAGES="procps-ng sudo shadow"
 
     # shellcheck disable=SC2086
     __tdnf_install_noinput ${__PACKAGES} || return 1
@@ -7188,21 +6629,43 @@ install_photon_onedir_deps() {
 
 
 install_photon_onedir() {
+
+    echodebug "install_photon_onedir() entry"
+
     STABLE_REV=$ONEDIR_REV
+    _GENERIC_PKG_VERSION=""
+
+    if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        # Major version of Salt; config and repo already set up
+        __get_packagesite_onedir_latest "$STABLE_REV"
+        MINOR_VER_STRG="-$_GENERIC_PKG_VERSION"
+    elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+        # Minor version of Salt; pin the specific minor version
+        STABLE_REV_DOT=$(echo "$STABLE_REV" | sed 's/-/\./')
+        MINOR_VER_STRG="-$STABLE_REV_DOT"
+    else
+        # Default to the latest Salt version; config and repo already set up
+        __get_packagesite_onedir_latest
+        MINOR_VER_STRG="-$_GENERIC_PKG_VERSION"
+    fi
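+    ## MINOR_VER_STRG pins the package names below; e.g. a resolved version of
+    ## 3006.8 turns "salt-minion" into "salt-minion-3006.8"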
 
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-cloud"
+        __PACKAGES="${__PACKAGES} salt-cloud$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-master"
+        __PACKAGES="${__PACKAGES} salt-master$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-minion"
+        __PACKAGES="${__PACKAGES} salt-minion$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-syndic"
+        __PACKAGES="${__PACKAGES} salt-syndic$MINOR_VER_STRG"
+    fi
+
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api$MINOR_VER_STRG"
     fi
 
     # shellcheck disable=SC2086
@@ -7222,526 +6685,6 @@ install_photon_onedir_post() {
 #
 #######################################################################################################################
 
-#######################################################################################################################
-#
-#   FreeBSD Install Functions
-#
-
-# Using a separate conf step to head for idempotent install...
-__configure_freebsd_pkg_details() {
-    _SALT_ETC_DIR="/usr/local/etc/salt"
-    _PKI_DIR=${_SALT_ETC_DIR}/pki
-    _POST_NEON_PIP_INSTALL_ARGS="--prefix=/usr/local"
-}
-
-install_freebsd_deps() {
-    __configure_freebsd_pkg_details
-    pkg install -y pkg
-}
-
-install_freebsd_git_deps() {
-    install_freebsd_deps || return 1
-
-    if ! __check_command_exists git; then
-        /usr/local/sbin/pkg install -y git || return 1
-    fi
-    __git_clone_and_checkout || return 1
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-
-        SALT_DEPENDENCIES=$(/usr/local/sbin/pkg rquery %dn py39-salt)
-        # shellcheck disable=SC2086
-        /usr/local/sbin/pkg install -y ${SALT_DEPENDENCIES} python || return 1
-
-        /usr/local/sbin/pkg install -y py39-requests || return 1
-        /usr/local/sbin/pkg install -y py39-tornado4 || return 1
-
-    else
-        /usr/local/sbin/pkg install -y python py39-pip py39-setuptools libzmq4 libunwind || return 1
-    fi
-
-    echodebug "Adapting paths to FreeBSD"
-    # The list of files was taken from Salt's BSD port Makefile
-    for file in doc/man/salt-key.1 doc/man/salt-cp.1 doc/man/salt-minion.1 \
-                doc/man/salt-syndic.1 doc/man/salt-master.1 doc/man/salt-run.1 \
-                doc/man/salt.7 doc/man/salt.1 doc/man/salt-call.1; do
-        [ ! -f $file ] && continue
-        echodebug "Patching ${file}"
-        sed -in -e "s|/etc/salt|${_SALT_ETC_DIR}|" \
-                -e "s|/srv/salt|${_SALT_ETC_DIR}/states|" \
-                -e "s|/srv/pillar|${_SALT_ETC_DIR}/pillar|" ${file}
-    done
-    if [ ! -f salt/syspaths.py ]; then
-        # We still can't provide the system paths, salt 0.16.x
-        # Let's patch salt's source and adapt paths to what's expected on FreeBSD
-        echodebug "Replacing occurrences of '/etc/salt' with ${_SALT_ETC_DIR}"
-        # The list of files was taken from Salt's BSD port Makefile
-        for file in conf/minion conf/master salt/config.py salt/client.py \
-                    salt/modules/mysql.py salt/utils/parsers.py salt/modules/tls.py \
-                    salt/modules/postgres.py salt/utils/migrations.py; do
-            [ ! -f $file ] && continue
-            echodebug "Patching ${file}"
-            sed -in -e "s|/etc/salt|${_SALT_ETC_DIR}|" \
-                    -e "s|/srv/salt|${_SALT_ETC_DIR}/states|" \
-                    -e "s|/srv/pillar|${_SALT_ETC_DIR}/pillar|" ${file}
-        done
-    fi
-    echodebug "Finished patching"
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-
-    fi
-
-    return 0
-}
-
-install_freebsd_stable() {
-#
-# installing latest version of salt from FreeBSD CURRENT ports repo
-#
-    # shellcheck disable=SC2086
-    /usr/local/sbin/pkg install -y py39-salt || return 1
-
-    return 0
-}
-
-install_freebsd_git() {
-
-    # /usr/local/bin/python3 in FreeBSD is a symlink to /usr/local/bin/python3.7
-    __PYTHON_PATH=$(readlink -f "$(command -v python3)")
-    __ESCAPED_PYTHON_PATH=$(echo "${__PYTHON_PATH}" | sed 's/\//\\\//g')
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${__PYTHON_PATH}" || return 1
-    for script in salt_api salt_master salt_minion salt_proxy salt_syndic; do
-        __fetch_url "/usr/local/etc/rc.d/${script}" "https://raw.githubusercontent.com/freebsd/freebsd-ports/master/sysutils/py-salt/files/${script}.in" || return 1
-        sed -i '' 's/%%PREFIX%%/\/usr\/local/g' /usr/local/etc/rc.d/${script}
-        sed -i '' "s/%%PYTHON_CMD%%/${__ESCAPED_PYTHON_PATH}/g" /usr/local/etc/rc.d/${script}
-        chmod +x /usr/local/etc/rc.d/${script} || return 1
-    done
-
-        return 0
-    fi
-
-    # Install from git
-    if [ ! -f salt/syspaths.py ]; then
-        # We still can't provide the system paths, salt 0.16.x
-        ${__PYTHON_PATH} setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1
-    else
-        ${__PYTHON_PATH} setup.py \
-            --salt-root-dir=/ \
-            --salt-config-dir="${_SALT_ETC_DIR}" \
-            --salt-cache-dir="${_SALT_CACHE_DIR}" \
-            --salt-sock-dir=/var/run/salt \
-            --salt-srv-root-dir="${_SALT_ETC_DIR}" \
-            --salt-base-file-roots-dir="${_SALT_ETC_DIR}/states" \
-            --salt-base-pillar-roots-dir="${_SALT_ETC_DIR}/pillar" \
-            --salt-base-master-roots-dir="${_SALT_ETC_DIR}/salt-master" \
-            --salt-logs-dir=/var/log/salt \
-            --salt-pidfile-dir=/var/run \
-            ${SETUP_PY_INSTALL_ARGS} install \
-            || return 1
-    fi
-
-    for script in salt_api salt_master salt_minion salt_proxy salt_syndic; do
-        __fetch_url "/usr/local/etc/rc.d/${script}" "https://raw.githubusercontent.com/freebsd/freebsd-ports/master/sysutils/py-salt/files/${script}.in" || return 1
-        sed -i '' 's/%%PREFIX%%/\/usr\/local/g' /usr/local/etc/rc.d/${script}
-        sed -i '' "s/%%PYTHON_CMD%%/${__ESCAPED_PYTHON_PATH}/g" /usr/local/etc/rc.d/${script}
-        chmod +x /usr/local/etc/rc.d/${script} || return 1
-    done
-
-    # And we're good to go
-    return 0
-}
-
-install_freebsd_stable_post() {
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        enable_string="salt_${fname}_enable=YES"
-        grep "$enable_string" /etc/rc.conf >/dev/null 2>&1
-        [ $? -eq 1 ] && sysrc $enable_string
-
-    done
-}
-
-install_freebsd_git_post() {
-    install_freebsd_stable_post || return 1
-    return 0
-}
-
-install_freebsd_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
-
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        service salt_$fname stop > /dev/null 2>&1
-        service salt_$fname start
-    done
-}
-
-install_freebsd_onedir() {
-#
-# call install_freebsd_stable
-#
-    install_freebsd_stable || return 1
-
-    return 0
-}
-#
-#   Ended FreeBSD Install Functions
-#
-#######################################################################################################################
-
-#######################################################################################################################
-#
-#   OpenBSD Install Functions
-#
-
-install_openbsd_deps() {
-    if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
-        OPENBSD_REPO='https://cdn.openbsd.org/pub/OpenBSD'
-        echoinfo "setting package repository to $OPENBSD_REPO"
-        echo "${OPENBSD_REPO}" >/etc/installurl || return 1
-    fi
-
-    if [ "${_EXTRA_PACKAGES}" != "" ]; then
-        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
-        # shellcheck disable=SC2086
-        pkg_add -I -v ${_EXTRA_PACKAGES} || return 1
-    fi
-    return 0
-}
-
-install_openbsd_git_deps() {
-    install_openbsd_deps || return 1
-
-    if ! __check_command_exists git; then
-        pkg_add -I -v git || return 1
-    fi
-    __git_clone_and_checkout || return 1
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        pkg_add -I -v py3-pip py3-setuptools
-    fi
-
-    #
-    # Let's trigger config_salt()
-    #
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
-
-    return 0
-}
-
-install_openbsd_git() {
-    #
-    # Install from git
-    #
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    if [ ! -f salt/syspaths.py ]; then
-        # We still can't provide the system paths, salt 0.16.x
-        /usr/local/bin/python2.7 setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1
-    fi
-    return 0
-}
-
-install_openbsd_stable() {
-    pkg_add -r -I -v salt || return 1
-    return 0
-}
-
-install_openbsd_post() {
-    for fname in api master minion syndic; do
-        [ $fname = "api" ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        rcctl enable salt_$fname
-    done
-
-    return 0
-}
-
-install_openbsd_check_services() {
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && continue
-
-        if [ -f /etc/rc.d/salt_${fname} ]; then
-            __check_services_openbsd salt_${fname} || return 1
-        fi
-    done
-
-    return 0
-}
-
-install_openbsd_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
-
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        rcctl restart salt_${fname}
-    done
-
-    return 0
-}
-
-install_openbsd_onedir() {
-#
-# Call install_openbsd_stable
-#
-    install_openbsd_stable || return 1
-
-    return 0
-}
-#
-#   Ended OpenBSD Install Functions
-#
-#######################################################################################################################
-
-#######################################################################################################################
-#
-#   SmartOS Install Functions
-#
-install_smartos_deps() {
-    smartos_deps="$(pkgin show-deps salt | grep '^\s' | grep -v '\snot' | xargs) py27-m2crypto"
-    pkgin -y install "${smartos_deps}" || return 1
-
-    # Set _SALT_ETC_DIR to SmartOS default if they didn't specify
-    _SALT_ETC_DIR=${BS_SALT_ETC_DIR:-/opt/local/etc/salt}
-    # We also need to redefine the PKI directory
-    _PKI_DIR=${_SALT_ETC_DIR}/pki
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        # Let's set the configuration directory to /tmp
-        _TEMP_CONFIG_DIR="/tmp"
-        CONFIG_SALT_FUNC="config_salt"
-
-        # Let's download, since they were not provided, the default configuration files
-        if [ ! -f "$_SALT_ETC_DIR/minion" ] && [ ! -f "$_TEMP_CONFIG_DIR/minion" ]; then
-            # shellcheck disable=SC2086
-            curl $_CURL_ARGS -s -o "$_TEMP_CONFIG_DIR/minion" -L \
-                https://raw.githubusercontent.com/saltstack/salt/master/conf/minion || return 1
-        fi
-        if [ ! -f "$_SALT_ETC_DIR/master" ] && [ ! -f $_TEMP_CONFIG_DIR/master ]; then
-            # shellcheck disable=SC2086
-            curl $_CURL_ARGS -s -o "$_TEMP_CONFIG_DIR/master" -L \
-                https://raw.githubusercontent.com/saltstack/salt/master/conf/master || return 1
-        fi
-    fi
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE  ]; then
-        pkgin -y install py27-apache-libcloud || return 1
-    fi
-
-    if [ "${_EXTRA_PACKAGES}" != "" ]; then
-        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
-        # shellcheck disable=SC2086
-        pkgin -y install ${_EXTRA_PACKAGES} || return 1
-    fi
-
-    return 0
-}
-
-install_smartos_git_deps() {
-    install_smartos_deps || return 1
-
-    if ! __check_command_exists git; then
-        pkgin -y install git || return 1
-    fi
-
-    __git_clone_and_checkout || return 1
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # Install whichever tornado is in the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            __check_pip_allowed "You need to allow pip based installations (-P) in order to install the python package '${__REQUIRED_TORNADO}'"
-
-            # Install whichever futures is in the requirements file
-            __REQUIRED_FUTURES="$(grep futures "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            __check_pip_allowed "You need to allow pip based installations (-P) in order to install the python package '${__REQUIRED_FUTURES}'"
-
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                if ! __check_command_exists pip; then
-                    pkgin -y install py27-pip
-                fi
-                pip install -U "${__REQUIRED_TORNADO}"
-            fi
-
-            if [ "${__REQUIRED_FUTURES}" != "" ]; then
-                if ! __check_command_exists pip; then
-                    pkgin -y install py27-pip
-                fi
-                pip install -U "${__REQUIRED_FUTURES}"
-            fi
-        fi
-    else
-        if ! __check_command_exists pip; then
-            pkgin -y install py27-pip
-        fi
-        pkgin -y install py27-setuptools
-    fi
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
-
-    return 0
-}
-
-install_smartos_stable() {
-    pkgin -y install salt || return 1
-    return 0
-}
-
-install_smartos_git() {
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    # Use setuptools in order to also install dependencies
-    # lets force our config path on the setup for now, since salt/syspaths.py only  got fixed in 2015.5.0
-    USE_SETUPTOOLS=1 /opt/local/bin/python setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install || return 1
-    return 0
-}
-
-install_smartos_post() {
-    smf_dir="/opt/custom/smf"
-
-    # Install manifest files if needed.
-    for fname in api master minion syndic; do
-        # Skip if not meant to be installed
-        [ $fname = "api" ] && \
-            ([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! __check_command_exists "salt-${fname}") && continue
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        svcs network/salt-$fname > /dev/null 2>&1
-        if [ $? -eq 1 ]; then
-            if [ ! -f "$_TEMP_CONFIG_DIR/salt-$fname.xml" ]; then
-                # shellcheck disable=SC2086
-                curl $_CURL_ARGS -s -o "$_TEMP_CONFIG_DIR/salt-$fname.xml" -L \
-                    "https://raw.githubusercontent.com/saltstack/salt/master/pkg/smartos/salt-$fname.xml"
-            fi
-            svccfg import "$_TEMP_CONFIG_DIR/salt-$fname.xml"
-            if [ "${VIRTUAL_TYPE}" = "global" ]; then
-                if [ ! -d "$smf_dir" ]; then
-                    mkdir -p "$smf_dir" || return 1
-                fi
-                if [ ! -f "$smf_dir/salt-$fname.xml" ]; then
-                    __copyfile "$_TEMP_CONFIG_DIR/salt-$fname.xml" "$smf_dir/" || return 1
-                fi
-            fi
-        fi
-    done
-
-    return 0
-}
-
-install_smartos_git_post() {
-    smf_dir="/opt/custom/smf"
-
-    # Install manifest files if needed.
-    for fname in api master minion syndic; do
-        # Skip if not meant to be installed
-        [ $fname = "api" ] && \
-            ([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! __check_command_exists "salt-${fname}") && continue
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        svcs "network/salt-$fname" > /dev/null 2>&1
-        if [ $? -eq 1 ]; then
-            svccfg import "${_SALT_GIT_CHECKOUT_DIR}/pkg/smartos/salt-$fname.xml"
-            if [ "${VIRTUAL_TYPE}" = "global" ]; then
-                if [ ! -d $smf_dir ]; then
-                    mkdir -p "$smf_dir"
-                fi
-                if [ ! -f "$smf_dir/salt-$fname.xml" ]; then
-                    __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/smartos/salt-$fname.xml" "$smf_dir/"
-                fi
-            fi
-        fi
-    done
-
-    return 0
-}
-
-install_smartos_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
-
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        # Stop if running && Start service
-        svcadm disable salt-$fname > /dev/null 2>&1
-        svcadm enable salt-$fname
-    done
-
-    return 0
-}
-install_smartos_onedir() {
-#
-# call install_smartos_stable
-#
-    install_smartos_stable || return 1
-
-    return 0
-}
-#
-#   Ended SmartOS Install Functions
-#
-#######################################################################################################################
 
 #######################################################################################################################
 #
@@ -7749,38 +6692,85 @@ install_smartos_onedir() {
 #
 __ZYPPER_REQUIRES_REPLACE_FILES=-1
 
-__set_suse_pkg_repo() {
-
-    # Set distro repo variable
-    if [ "${DISTRO_MAJOR_VERSION}" -gt 2015 ]; then
-        DISTRO_REPO="openSUSE_Tumbleweed"
-    elif [ "${DISTRO_MAJOR_VERSION}" -eq 15 ] && [ "${DISTRO_MINOR_VERSION}" -ge 4 ]; then
-        DISTRO_REPO="${DISTRO_MAJOR_VERSION}.${DISTRO_MINOR_VERSION}"
-    elif [ "${DISTRO_MAJOR_VERSION}" -ge 42 ] || [ "${DISTRO_MAJOR_VERSION}" -eq 15 ]; then
-        DISTRO_REPO="openSUSE_Leap_${DISTRO_MAJOR_VERSION}.${DISTRO_MINOR_VERSION}"
-    else
-        DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}_SP${SUSE_PATCHLEVEL}"
-    fi
-
-    suse_pkg_url_base="https://download.opensuse.org/repositories/systemsmanagement:/saltstack"
-    suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack.repo"
-    SUSE_PKG_URL="$suse_pkg_url_base/$suse_pkg_url_path"
-}
 
 __check_and_refresh_suse_pkg_repo() {
     # Check to see if systemsmanagement_saltstack exists
-    __zypper repos | grep -q systemsmanagement_saltstack
+    __zypper repos | grep -q 'salt.repo'
 
     if [ $? -eq 1 ]; then
-        # zypper does not yet know anything about systemsmanagement_saltstack
-        __zypper addrepo --refresh "${SUSE_PKG_URL}" || return 1
+        # zypper does not yet know anything about salt.repo
+        # zypper has no exclude= support like tdnf on Photon, so write the repo
+        # file directly and pin the release series with zypper package locks below
+        ZYPPER_REPO_FILE="/etc/zypp/repos.d/salt.repo"
+        # shellcheck disable=SC2129
+        if [ "$ONEDIR_REV" != "latest" ]; then
+            # a bare major version (3006 is the default) selects the latest release on that branch
+            if [ "$(echo "$ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+                # latest release for the 3006 or 3007 branch
+                REPO_REV_MAJOR=$(echo "$ONEDIR_REV" | cut -d '.' -f 1)
+                if [ "$REPO_REV_MAJOR" -eq "3007" ]; then
+                    # Enable the Salt 3007 STS repo
+                    echo "[salt-repo-3007-sts]" > "${ZYPPER_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v3007 STS" >> "${ZYPPER_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${ZYPPER_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${ZYPPER_REPO_FILE}"
+                    echo "priority=10" >> "${ZYPPER_REPO_FILE}"
+                    echo "enabled=1" >> "${ZYPPER_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${ZYPPER_REPO_FILE}"
+                    echo "exclude=*3006* *3008* *3009* *3010*" >> "${ZYPPER_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${ZYPPER_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${ZYPPER_REPO_FILE}"
+                    zypper addlock "salt-* < 3007" && zypper addlock "salt-* >= 3008"
+                else
+                    # Salt 3006 repo
+                    echo "[salt-repo-3006-lts]" > "${ZYPPER_REPO_FILE}"
+                    echo "name=Salt Repo for Salt v3006 LTS" >> "${ZYPPER_REPO_FILE}"
+                    echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${ZYPPER_REPO_FILE}"
+                    echo "skip_if_unavailable=True" >> "${ZYPPER_REPO_FILE}"
+                    echo "priority=10" >> "${ZYPPER_REPO_FILE}"
+                    echo "enabled=1" >> "${ZYPPER_REPO_FILE}"
+                    echo "enabled_metadata=1" >> "${ZYPPER_REPO_FILE}"
+                    echo "exclude=*3007* *3008* *3009* *3010*" >> "${ZYPPER_REPO_FILE}"
+                    echo "gpgcheck=1" >> "${ZYPPER_REPO_FILE}"
+                    echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${ZYPPER_REPO_FILE}"
+                    zypper addlock "salt-* < 3006" && zypper addlock "salt-* >= 3007"
+                fi
+            elif [ "$(echo "$ONEDIR_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+                # using a pinned minor version, e.g. 3006.8
+                ONEDIR_REV_DOT=$(echo "$ONEDIR_REV" | sed 's/-/\./')
+                echo "[salt-repo-${ONEDIR_REV_DOT}-lts]" > "${ZYPPER_REPO_FILE}"
+                echo "name=Salt Repo for Salt v${ONEDIR_REV_DOT} LTS" >> "${ZYPPER_REPO_FILE}"
+                echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${ZYPPER_REPO_FILE}"
+                echo "skip_if_unavailable=True" >> "${ZYPPER_REPO_FILE}"
+                echo "priority=10" >> "${ZYPPER_REPO_FILE}"
+                echo "enabled=1" >> "${ZYPPER_REPO_FILE}"
+                echo "enabled_metadata=1" >> "${ZYPPER_REPO_FILE}"
+                echo "gpgcheck=1" >> "${ZYPPER_REPO_FILE}"
+                echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${ZYPPER_REPO_FILE}"a
+                ONEDIR_MAJ_VER=$(echo "${ONEDIR_REV_DOT}" | awk -F '.' '{print $1}')
+                # shellcheck disable=SC2004
+                ONEDIR_MAJ_VER_PLUS=$((${ONEDIR_MAJ_VER} + 1))
+                zypper addlock "salt-* < ${ONEDIR_MAJ_VER}" && zypper addlock "salt-* >= ${ONEDIR_MAJ_VER_PLUS}"
+            fi
+        else
+            # Enable the Salt LATEST repo
+            echo "[salt-repo-latest]" > "${ZYPPER_REPO_FILE}"
+            echo "name=Salt Repo for Salt LATEST release" >> "${ZYPPER_REPO_FILE}"
+            echo "baseurl=https://${_REPO_URL}/saltproject-rpm/" >> "${ZYPPER_REPO_FILE}"
+            echo "skip_if_unavailable=True" >> "${ZYPPER_REPO_FILE}"
+            echo "priority=10" >> "${ZYPPER_REPO_FILE}"
+            echo "enabled=1" >> "${ZYPPER_REPO_FILE}"
+            echo "enabled_metadata=1" >> "${ZYPPER_REPO_FILE}"
+            echo "gpgcheck=1" >> "${ZYPPER_REPO_FILE}"
+            echo "gpgkey=https://${_REPO_URL}/api/security/keypair/SaltProjectKey/public" >> "${ZYPPER_REPO_FILE}"
+        fi
+        __zypper addrepo --refresh "${ZYPPER_REPO_FILE}" || return 1
     fi
 }
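+# Worked example of the version locks above: for ONEDIR_REV=3006.8,
+# ONEDIR_MAJ_VER=3006 and ONEDIR_MAJ_VER_PLUS=3007, so zypper ends up with
+#   zypper addlock "salt-* < 3006"
+#   zypper addlock "salt-* >= 3007"
+# which keeps installs and upgrades within the pinned 3006.x series.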
 
 __version_lte() {
-    if ! __check_command_exists python; then
-        zypper --non-interactive install --replacefiles --auto-agree-with-licenses python || \
-             zypper --non-interactive install --auto-agree-with-licenses python || return 1
+    if ! __check_command_exists python3; then
+        zypper --non-interactive install --replacefiles --auto-agree-with-licenses python3 || \
+             zypper --non-interactive install --auto-agree-with-licenses python3 || return 1
     fi
 
     if [ "$(${_PY_EXE} -c 'import sys; V1=tuple([int(i) for i in sys.argv[1].split(".")]); V2=tuple([int(i) for i in sys.argv[2].split(".")]); print(V1<=V2)' "$1" "$2")" = "True" ]; then
@@ -7823,9 +6813,7 @@ __zypper_install() {
 
 __opensuse_prep_install() {
     # DRY function for common installation preparatory steps for SUSE
-    if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
-        # Is the repository already known
-        __set_suse_pkg_repo
+    if [ "$_DISABLE_REPOS" -eq $BS_FALSE ]; then
         # Check zypper repos and refresh if necessary
         __check_and_refresh_suse_pkg_repo
     fi
@@ -7855,7 +6843,7 @@ install_opensuse_stable_deps() {
     # YAML module is used for generating custom master/minion configs
     # requests is still used by many salt modules
     # Salt needs python-zypp installed in order to use the zypper module
-    __PACKAGES="python-PyYAML python-requests python-zypp"
+    __PACKAGES="python${PY_PKG_VER}-PyYAML python${PY_PKG_VER}-requests python${PY_PKG_VER}-zypp"
 
     # shellcheck disable=SC2086
     __zypper_install ${__PACKAGES} || return 1
@@ -7880,29 +6868,14 @@ install_opensuse_git_deps() {
         __zypper_install git  || return 1
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        __zypper_install patch || return 1
-
-        __PACKAGES="libzmq5 python-Jinja2 python-m2crypto python-msgpack-python python-pycrypto python-pyzmq python-xml python-futures"
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # We're on the master branch, install whichever tornado is on the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                __PACKAGES="${__PACKAGES} python-tornado"
-            fi
-        fi
-
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            __PACKAGES="${__PACKAGES} python-apache-libcloud"
-        fi
     # Check for Tumbleweed
-    elif [ "${DISTRO_MAJOR_VERSION}" -ge 20210101 ]; then
+    if [ "${DISTRO_MAJOR_VERSION}" -ge 20210101 ]; then
         __PACKAGES="python3-pip gcc-c++ python3-pyzmq-devel"
     else
-        __PACKAGES="python-pip python-setuptools gcc"
+        __PACKAGES="python3-pip python3-setuptools gcc"
     fi
 
     # shellcheck disable=SC2086
@@ -7910,7 +6883,7 @@ install_opensuse_git_deps() {
 
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -7922,19 +6895,34 @@ install_opensuse_onedir_deps() {
 }
 
 install_opensuse_stable() {
+    if [ "$(echo "$STABLE_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        # Major version of Salt; config and repo already set up
+        MINOR_VER_STRG=""
+    elif [ "$(echo "$STABLE_REV" | grep -E '^([3-9][0-5]{2}[6-9](\.[0-9]*)?)')" != "" ]; then
+        # Minor version of Salt; pin the specific minor version
+        STABLE_REV_DOT=$(echo "$STABLE_REV" | sed 's/-/\./')
+        MINOR_VER_STRG="-$STABLE_REV_DOT"
+    else
+        MINOR_VER_STRG=""
+    fi
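+    ## e.g. STABLE_REV=3006.8 with master and minion selected asks zypper for
+    ## "salt-master-3006.8 salt-minion-3006.8"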
+
     __PACKAGES=""
 
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then
-        __PACKAGES="${__PACKAGES} salt-cloud"
+        __PACKAGES="${__PACKAGES} salt-cloud$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MASTER" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-master"
+        __PACKAGES="${__PACKAGES} salt-master$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-minion"
+        __PACKAGES="${__PACKAGES} salt-minion$MINOR_VER_STRG"
     fi
     if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} salt-syndic"
+        __PACKAGES="${__PACKAGES} salt-syndic$MINOR_VER_STRG"
+    fi
+
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ]; then
+        __PACKAGES="${__PACKAGES} salt-api$MINOR_VER_STRG"
     fi
 
     # shellcheck disable=SC2086
@@ -7944,12 +6932,7 @@ install_opensuse_stable() {
 }
 
 install_opensuse_git() {
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    python setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
+    __install_salt_from_repo "${_PY_EXE}" || return 1
     return 0
 }
 
@@ -7967,7 +6950,7 @@ install_opensuse_stable_post() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ] || [ -f /usr/bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
             sleep 1
             systemctl daemon-reload
@@ -7990,7 +6973,7 @@ install_opensuse_git_post() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if command -v systemctl; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             use_usr_lib=$BS_FALSE
 
             if [ "${DISTRO_MAJOR_VERSION}" -ge 15 ]; then
@@ -8031,7 +7014,7 @@ install_opensuse_onedir_post() {
 }
 
 install_opensuse_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
 
     for fname in api master minion syndic; do
         # Skip salt-api since the service should be opt-in and not necessarily started on boot
@@ -8042,7 +7025,7 @@ install_opensuse_restart_daemons() {
         [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
         [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
 
-        if [ -f /bin/systemctl ]; then
+        if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
             systemctl stop salt-$fname > /dev/null 2>&1
             systemctl start salt-$fname.service && continue
             echodebug "Failed to start salt-$fname using systemd"
@@ -8058,7 +7041,7 @@ install_opensuse_restart_daemons() {
 }
 
 install_opensuse_check_services() {
-    if [ ! -f /bin/systemctl ]; then
+    if [ "$_SYSTEMD_FUNCTIONAL" -eq $BS_TRUE ]; then
         # Not running systemd!? Don't check!
         return 0
     fi
@@ -8092,11 +7075,10 @@ install_opensuse_15_stable_deps() {
 
     # SUSE only packages Salt for Python 3 on Leap 15
     # Py3 is the default bootstrap install for Leap 15
-    # However, git installs might specify "-x python2"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-        PY_PKG_VER=2
-    else
-        PY_PKG_VER=3
+    # However, git installs that specify "-x python2" are disallowed
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
     # YAML module is used for generating custom master/minion configs
@@ -8122,49 +7104,23 @@ install_opensuse_15_git_deps() {
         __zypper_install git  || return 1
     fi
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-        PY_PKG_VER=2
-    else
-        PY_PKG_VER=3
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
-    __PACKAGES="python${PY_PKG_VER}-xml"
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-
-        # Py3 is the default bootstrap install for Leap 15
-        # However, git installs might specify "-x python2"
-        if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then
-            # This is required by some of the python2 packages below
-            __PACKAGES="${__PACKAGES} libpython2_7-1_0 python2-futures python-ipaddress"
-        fi
-
-        __PACKAGES="${__PACKAGES} libzmq5 python${PY_PKG_VER}-Jinja2 python${PY_PKG_VER}-msgpack"
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-pycrypto python${PY_PKG_VER}-pyzmq"
-
-        if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-            # We're on the master branch, install whichever tornado is on the requirements file
-            __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-            if [ "${__REQUIRED_TORNADO}" != "" ]; then
-                __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado"
-            fi
-        fi
-
-        if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-            __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-apache-libcloud"
-        fi
-    else
-        __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
-    fi
+    PY_PKG_VER=3
+    __PACKAGES="python${PY_PKG_VER}-xml python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc"
 
     # shellcheck disable=SC2086
     __zypper_install ${__PACKAGES} || return 1
 
     # Let's trigger config_salt()
     if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
+        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf"
         CONFIG_SALT_FUNC="config_salt"
     fi
 
@@ -8175,17 +7131,12 @@ install_opensuse_15_git() {
 
     # Py3 is the default bootstrap install for Leap 15
     if [ -n "$_PY_EXE" ]; then
-        _PYEXE=${_PY_EXE}
+        _PYEXE="${_PY_EXE}"
     else
         _PYEXE=python3
     fi
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    ${_PYEXE} setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1
+    __install_salt_from_repo "${_PY_EXE}" || return 1
     return 0
 }
 
@@ -8265,272 +7216,94 @@ install_suse_15_restart_daemons() {
     return 0
 }
 
+install_suse_15_check_services() {
+    install_opensuse_check_services || return 1
+    return 0
+}
+
 #
 #   End of SUSE Enterprise 15
 #
 #######################################################################################################################
 
+
 #######################################################################################################################
 #
-#   SUSE Enterprise 12
+#   SUSE Enterprise 15, now has ID sled
 #
 
-install_suse_12_stable_deps() {
+install_sled_15_stable_deps() {
     __opensuse_prep_install || return 1
-
-    # YAML module is used for generating custom master/minion configs
-    # requests is still used by many salt modules
-    # Salt needs python-zypp installed in order to use the zypper module
-    __PACKAGES="python-PyYAML python-requests python-zypp"
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} python-apache-libcloud"
-    fi
-
-    # shellcheck disable=SC2086,SC2090
-    __zypper_install ${__PACKAGES} || return 1
-
-    # SLES 11 SP3 ships with both python-M2Crypto-0.22.* and python-m2crypto-0.21 and we will be asked which
-    # we want to install, even with --non-interactive.
-    # Let's try to install the higher version first and then the lower one in case of failure
-    __zypper_install 'python-M2Crypto>=0.22' || __zypper_install 'python-M2Crypto>=0.21' || return 1
-
-    if [ "${_EXTRA_PACKAGES}" != "" ]; then
-        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
-        # shellcheck disable=SC2086
-        __zypper_install ${_EXTRA_PACKAGES} || return 1
-    fi
+    install_opensuse_15_stable_deps || return 1
 
     return 0
 }
 
-install_suse_12_git_deps() {
-    install_suse_12_stable_deps || return 1
+install_sled_15_git_deps() {
+    install_suse_15_stable_deps || return 1
 
     if ! __check_command_exists git; then
         __zypper_install git-core  || return 1
     fi
 
-    __git_clone_and_checkout || return 1
-
-    __PACKAGES=""
-    # shellcheck disable=SC2089
-    __PACKAGES="${__PACKAGES} libzmq4 python-Jinja2 python-msgpack-python python-pycrypto"
-    __PACKAGES="${__PACKAGES} python-pyzmq python-xml"
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-        # We're on the master branch, install whichever tornado is on the requirements file
-        __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-        if [ "${__REQUIRED_TORNADO}" != "" ]; then
-            __PACKAGES="${__PACKAGES} python-tornado"
-        fi
-    fi
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} python-apache-libcloud"
-    fi
-
-    # shellcheck disable=SC2086
-    __zypper_install ${__PACKAGES} || return 1
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
+    install_opensuse_15_git_deps || return 1
 
     return 0
 }
 
-install_suse_12_onedir_deps() {
-    install_suse_12_stable_deps || return 1
-    return 0
-}
-
-install_suse_12_stable() {
-    install_opensuse_stable || return 1
-    return 0
-}
-
-install_suse_12_git() {
-    install_opensuse_git || return 1
-    return 0
-}
-
-install_suse_12_onedir() {
-    install_opensuse_stable || return 1
-    return 0
-}
-
-install_suse_12_stable_post() {
-    install_opensuse_stable_post || return 1
-    return 0
-}
-
-install_suse_12_git_post() {
-    install_opensuse_git_post || return 1
-    return 0
-}
-
-install_suse_12_onedir_post() {
-    install_opensuse_stable_post || return 1
-    return 0
-}
-
-install_suse_12_restart_daemons() {
-    install_opensuse_restart_daemons || return 1
-    return 0
-}
-
-#
-#   End of SUSE Enterprise 12
-#
-#######################################################################################################################
-
-#######################################################################################################################
-#
-#   SUSE Enterprise 11
-#
-
-install_suse_11_stable_deps() {
+install_sled_15_onedir_deps() {
     __opensuse_prep_install || return 1
-
-    # YAML module is used for generating custom master/minion configs
-    __PACKAGES="python-PyYAML"
-
-    # shellcheck disable=SC2086,SC2090
-    __zypper_install ${__PACKAGES} || return 1
-
-    # SLES 11 SP3 ships with both python-M2Crypto-0.22.* and python-m2crypto-0.21 and we will be asked which
-    # we want to install, even with --non-interactive.
-    # Let's try to install the higher version first and then the lower one in case of failure
-    __zypper_install 'python-M2Crypto>=0.22' || __zypper_install 'python-M2Crypto>=0.21' || return 1
-
-    if [ "${_EXTRA_PACKAGES}" != "" ]; then
-        echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
-        # shellcheck disable=SC2086
-        __zypper_install ${_EXTRA_PACKAGES} || return 1
-    fi
+    install_opensuse_15_onedir_deps || return 1
 
     return 0
 }
 
-install_suse_11_git_deps() {
-    install_suse_11_stable_deps || return 1
-
-    if ! __check_command_exists git; then
-        __zypper_install git  || return 1
-    fi
-
-    __git_clone_and_checkout || return 1
-
-    __PACKAGES=""
-    # shellcheck disable=SC2089
-    __PACKAGES="${__PACKAGES} libzmq4 python-Jinja2 python-msgpack-python python-pycrypto"
-    __PACKAGES="${__PACKAGES} python-pyzmq python-xml python-zypp"
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
-        # We're on the master branch, install whichever tornado is on the requirements file
-        __REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
-        if [ "${__REQUIRED_TORNADO}" != "" ]; then
-            __PACKAGES="${__PACKAGES} python-tornado"
-        fi
-    fi
-
-    if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
-        __PACKAGES="${__PACKAGES} python-apache-libcloud"
-    fi
-
-    # shellcheck disable=SC2086
-    __zypper_install ${__PACKAGES} || return 1
-
-    # Let's trigger config_salt()
-    if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
-        _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
-        CONFIG_SALT_FUNC="config_salt"
-    fi
-
-    return 0
-}
-
-install_suse_11_onedir_deps() {
-    install_suse_11_stable_deps || return 1
-    return 0
-}
-
-install_suse_11_stable() {
+install_sled_15_stable() {
     install_opensuse_stable || return 1
     return 0
 }
 
-install_suse_11_git() {
-    install_opensuse_git || return 1
+install_sled_15_git() {
+    install_opensuse_15_git || return 1
     return 0
 }
 
-install_suse_11_onedir() {
+install_sled_15_onedir() {
     install_opensuse_stable || return 1
     return 0
 }
 
-install_suse_11_stable_post() {
+install_sled_15_stable_post() {
     install_opensuse_stable_post || return 1
     return 0
 }
 
-install_suse_11_git_post() {
+install_sled_15_git_post() {
     install_opensuse_git_post || return 1
     return 0
 }
 
-install_suse_11_onedir_post() {
+install_sled_15_onedir_post() {
     install_opensuse_stable_post || return 1
     return 0
 }
 
-install_suse_11_restart_daemons() {
+install_sled_15_restart_daemons() {
     install_opensuse_restart_daemons || return 1
     return 0
 }
 
-
-#
-#   End of SUSE Enterprise 11
-#
-#######################################################################################################################
-
-#######################################################################################################################
-#
-# SUSE Enterprise General Functions
-#
-
-# Used for both SLE 11 and 12
-install_suse_check_services() {
-    if [ ! -f /bin/systemctl ]; then
-        # Not running systemd!? Don't check!
-        return 0
-    fi
-
-    for fname in api master minion syndic; do
-        # Skip salt-api since the service should be opt-in and not necessarily started on boot
-        [ $fname = "api" ] && continue
-
-        # Skip if not meant to be installed
-        [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
-        [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
-        [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
-
-        __check_services_systemd salt-$fname || return 1
-    done
-
+install_sled_15_check_services() {
+    install_opensuse_check_services || return 1
     return 0
 }
 
 #
-#   End of SUSE Enterprise General Functions
-#   End of SUSE Enterprise 15, aka sled
 #
 #######################################################################################################################
 
+
 #######################################################################################################################
 #
 #    Gentoo Install Functions.
@@ -8587,9 +7360,15 @@ __gentoo_pre_dep() {
         mkdir /etc/portage
     fi
 
-    # Enable Python 3.7 target for Salt Neon using GIT
-    if [ "${ITYPE}" = "git" ] && [ "${GIT_REV}" = "v3000" ]; then
-        EXTRA_PYTHON_TARGET=python3_7
+    # For git installs, enable the Python 3.10 target for Salt 3006 and later, otherwise keep Python 3.7 as before
+    if [ "${ITYPE}" = "git" ]; then
+        GIT_REV_MAJOR=$(echo "${GIT_REV}" | awk -F "." '{print $1}')
+        if [ "${GIT_REV_MAJOR}" = "v3006" ] || [ "${GIT_REV_MAJOR}" = "v3007" ]; then
+            EXTRA_PYTHON_TARGET=python3_10
+        else
+            # assume pre-3006, so leave it as Python 3.7
+            EXTRA_PYTHON_TARGET=python3_7
+        fi
     fi
 
     if [ -n "${EXTRA_PYTHON_TARGET:-}" ]; then
@@ -8605,7 +7384,6 @@ __gentoo_post_dep() {
         echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
         # shellcheck disable=SC2086
         __autounmask ${_EXTRA_PACKAGES} || return 1
-        # shellcheck disable=SC2086
         __emerge ${_EXTRA_PACKAGES} || return 1
     fi
 
@@ -8649,27 +7427,6 @@ install_gentoo_git_deps() {
         GENTOO_GIT_PACKAGES="${GENTOO_GIT_PACKAGES:-} dev-vcs/git"
     fi
 
-    # Salt <3000 does not automatically install dependencies. It has to be done manually.
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-        GENTOO_GIT_PACKAGES="${GENTOO_GIT_PACKAGES:-}
-            sys-apps/pciutils
-            dev-python/pyyaml
-            dev-python/pyzmq
-            dev-python/libnacl
-            dev-python/pycryptodome
-            dev-python/py
-            dev-python/requests
-            <dev-python/msgpack-1.0
-            dev-python/jinja
-            dev-python/pyasn1
-            dev-python/markupsafe
-            dev-python/cython
-            dev-python/six
-            dev-python/idna
-            dev-python/pycurl
-            <www-servers/tornado-5.0"
-    fi
-
     # Install libcloud when Salt Cloud support was requested
     if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
         GENTOO_GIT_PACKAGES="${GENTOO_GIT_PACKAGES:-} dev-python/libcloud"
@@ -8685,6 +7442,7 @@ install_gentoo_git_deps() {
     echoinfo "Running emerge -v1 setuptools"
     __emerge -v1 setuptools || return 1
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
     __gentoo_post_dep || return 1
 }
@@ -8699,36 +7457,17 @@ install_gentoo_stable() {
 
     # shellcheck disable=SC2086
     __autounmask ${GENTOO_SALT_PACKAGE} || return 1
-    # shellcheck disable=SC2086
     __emerge ${GENTOO_SALT_PACKAGE} || return 1
 }
 
 install_gentoo_git() {
-    _PYEXE=${_PY_EXE}
+    _PYEXE="${_PY_EXE}"
 
     if [ "$_PY_EXE" = "python3" ] || [ -z "$_PY_EXE" ]; then
-        if [ "${GIT_REV}" = "v3000" ]; then
-            # Salt Neon does not support Python 3.8 and greater
-            _PYEXE=python3.7
-        elif [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then
-            # Tornado 4.3 ebuild supports only Python 3.6, use Python 3.6 as the default Python 3 interpreter
-            _PYEXE=python3.6
-        else
-            _PYEXE=$(emerge --info | grep -oE 'PYTHON_SINGLE_TARGET="[^"]*"' | sed -e 's/"//g' -e 's/_/./g' | cut -d= -f2)
-        fi
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        __install_salt_from_repo_post_neon "${_PYEXE}" || return 1
-        return 0
-    fi
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        "${_PYEXE}" setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install || return 1
-    else
-        "${_PYEXE}" setup.py ${SETUP_PY_INSTALL_ARGS} install || return 1
+        _PYEXE=$(emerge --info | grep -oE 'PYTHON_SINGLE_TARGET="[^"]*"' | sed -e 's/"//g' -e 's/_/./g' | cut -d= -f2)
     fi
 
+    __install_salt_from_repo "${_PYEXE}" || return 1
     return 0
 }
 
@@ -8831,7 +7570,7 @@ install_gentoo_onedir_post() {
 }
 
 install_gentoo_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
 
     # Ensure upstart configs / systemd units are loaded
     if __check_command_exists systemctl ; then
@@ -8922,7 +7661,7 @@ install_voidlinux_stable_post() {
 }
 
 install_voidlinux_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
 
     for fname in master minion syndic; do
         [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
@@ -8972,79 +7711,87 @@ daemons_running_voidlinux() {
 #   OS X / Darwin Install Functions
 #
 
-__macosx_get_packagesite() {
-    DARWIN_ARCH="x86_64"
+#---  FUNCTION  -------------------------------------------------------------------------------------------------------
+#          NAME:  __macosx_get_packagesite_onedir_latest
+#   DESCRIPTION:  Set _PKG_VERSION to the latest MacOS release, or the latest release for the given major version
+#----------------------------------------------------------------------------------------------------------------------
+__macosx_get_packagesite_onedir_latest() {
 
-    __PY_VERSION_REPO="py2"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
+    echodebug "Find latest MacOS release from repository"
+
+    # Get the directory listing from the repository URL, sort it and pick the highest version
+    macos_versions_tmpdir=$(mktemp -d)
+    curr_pwd=$(pwd)
+    cd "${macos_versions_tmpdir}" || return 1
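+    # wget flags: -r recurse, -l 1 one level deep, -np no parent dirs,
+    # -nH drop the hostname directory, -x force directory creation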
+    wget -q -r -np -nH --exclude-directories=onedir,relenv,windows -x -l 1 "$SALT_MACOS_PKGDIR_URL/"
+    if [ "$#" -gt 0 ] && [ -n "$1" ]; then
+        MAJOR_VER="$1"
+        # shellcheck disable=SC2010
+        _PKG_VERSION=$(ls artifactory/saltproject-generic/macos/ | grep -v 'index.html' | sort -V -u | grep -E "$MAJOR_VER" | tail -n 1)
+    else
+        # shellcheck disable=SC2010
+        _PKG_VERSION=$(ls artifactory/saltproject-generic/macos/ | grep -v 'index.html' | sort -V -u | tail -n 1)
     fi
+    cd "${curr_pwd}" || return 1
+    rm -fR "${macos_versions_tmpdir}"
+
+    echodebug "latest MacOS release from repository found ${_PKG_VERSION}"
 
-    PKG="salt-${STABLE_REV}-${__PY_VERSION_REPO}-${DARWIN_ARCH}.pkg"
-    SALTPKGCONFURL="https://${_REPO_URL}/osx/${PKG}"
 }
 
-__parse_repo_json_python() {
-
-  # Using latest, grab the right
-  # version from the repo.json
-  _JSON_VERSION=$(python - <<-EOF
-import json, urllib.request
-url = "https://repo.saltproject.io/salt/py3/macos/repo.json"
-response = urllib.request.urlopen(url)
-data = json.loads(response.read())
-version = data["${_ONEDIR_REV}"][list(data["${_ONEDIR_REV}"])[0]]['version']
-print(version)
-EOF
-)
-echo "${_JSON_VERSION}"
-}
 
 __macosx_get_packagesite_onedir() {
-    DARWIN_ARCH="x86_64"
 
-    __PY_VERSION_REPO="py2"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PY_VERSION_REPO="py3"
+    echodebug "Get package site for onedir from repository"
+
+    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -ne 3 ]; then
+        echoerror "Python version is no longer supported, only Python 3"
+        return 1
     fi
 
+    DARWIN_ARCH="${CPU_ARCH_L}"
+    _PKG_VERSION=""
+
+    _ONEDIR_TYPE="saltproject-generic"
+    SALT_MACOS_PKGDIR_URL="https://${_REPO_URL}/${_ONEDIR_TYPE}/macos"
     if [ "$(echo "$_ONEDIR_REV" | grep -E '^(latest)$')" != "" ]; then
-      _PKG_VERSION=$(__parse_repo_json_python)
-    elif [ "$(echo "$_ONEDIR_REV" | grep -E '^([3-9][0-9]{3}(\.[0-9]*))')" != "" ]; then
-      _PKG_VERSION=$_ONEDIR_REV
+        __macosx_get_packagesite_onedir_latest
+    elif [ "$(echo "$_ONEDIR_REV" | grep -E '^(3006|3007)$')" != "" ]; then
+        # need to get latest for major version
+        __macosx_get_packagesite_onedir_latest "$_ONEDIR_REV"
+    elif [ "$(echo "$_ONEDIR_REV" | grep -E '^([3-9][0-9]{3}(\.[0-9]*)?)')" != "" ]; then
+        _PKG_VERSION=$_ONEDIR_REV
     else
-      _PKG_VERSION=$(__parse_repo_json_python)
+        # default to getting latest
+        __macosx_get_packagesite_onedir_latest
     fi
-    if [ "$(echo "$_ONEDIR_REV" | grep -E '^(3005)')" != "" ]; then
-      PKG="salt-${_PKG_VERSION}-macos-${DARWIN_ARCH}.pkg"
-    else
-      PKG="salt-${_PKG_VERSION}-${__PY_VERSION_REPO}-${DARWIN_ARCH}.pkg"
-    fi
-    SALTPKGCONFURL="https://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/macos/${ONEDIR_REV}/${PKG}"
-}
 
-# Using a separate conf step to head for idempotent install...
-__configure_macosx_pkg_details() {
-    __macosx_get_packagesite || return 1
-    return 0
+    PKG="salt-${_PKG_VERSION}-py3-${DARWIN_ARCH}.pkg"
+    SALTPKGCONFURL="${SALT_MACOS_PKGDIR_URL}/${_PKG_VERSION}/${PKG}"
 }
 
 __configure_macosx_pkg_details_onedir() {
+
     __macosx_get_packagesite_onedir || return 1
     return 0
 }
 
 install_macosx_stable_deps() {
-    __configure_macosx_pkg_details || return 1
+
+    __configure_macosx_pkg_details_onedir || return 1
     return 0
 }
 
 install_macosx_onedir_deps() {
+
     __configure_macosx_pkg_details_onedir || return 1
     return 0
 }
 
 install_macosx_git_deps() {
+
     install_macosx_stable_deps || return 1
 
     if ! echo "$PATH" | grep -q /usr/local/bin; then
@@ -9055,32 +7802,23 @@ install_macosx_git_deps() {
     __fetch_url "/tmp/get-pip.py" "https://bootstrap.pypa.io/get-pip.py" || return 1
 
     if [ -n "$_PY_EXE" ]; then
-        _PYEXE=${_PY_EXE}
+        _PYEXE="${_PY_EXE}"
     else
-        _PYEXE=python2.7
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
     # Install PIP
     $_PYEXE /tmp/get-pip.py || return 1
 
+    # shellcheck disable=SC2119
     __git_clone_and_checkout || return 1
 
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-        return 0
-    fi
-
-    __PIP_REQUIREMENTS="dev_python27.txt"
-    if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
-        __PIP_REQUIREMENTS="dev_python34.txt"
-    fi
-
-    requirements_file="${_SALT_GIT_CHECKOUT_DIR}/requirements/${__PIP_REQUIREMENTS}"
-    pip install -U -r "${requirements_file}" --install-option="--prefix=/opt/salt" || return 1
-
     return 0
 }
 
 install_macosx_stable() {
+
     install_macosx_stable_deps || return 1
 
     __fetch_url "/tmp/${PKG}" "${SALTPKGCONFURL}" || return 1
@@ -9091,6 +7829,7 @@ install_macosx_stable() {
 }
 
 install_macosx_onedir() {
+
     install_macosx_onedir_deps || return 1
 
     __fetch_url "/tmp/${PKG}" "${SALTPKGCONFURL}" || return 1
@@ -9102,27 +7841,20 @@ install_macosx_onedir() {
 
 install_macosx_git() {
 
     if [ -n "$_PY_EXE" ]; then
-        _PYEXE=${_PY_EXE}
+        _PYEXE="${_PY_EXE}"
     else
-        _PYEXE=python2.7
-    fi
-
-    if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then
-         __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1
-        return 0
-    fi
-
-    if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then
-        $_PYEXE setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --prefix=/opt/salt || return 1
-    else
-        $_PYEXE setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/opt/salt || return 1
+        echoerror "Python 2 is no longer supported, only Python 3"
+        return 1
     fi
 
+    __install_salt_from_repo "${_PY_EXE}" || return 1
     return 0
 }
 
 install_macosx_stable_post() {
+
     if [ ! -f /etc/paths.d/salt ]; then
         print "%s\n" "/opt/salt/bin" "/usr/local/sbin" > /etc/paths.d/salt
     fi
@@ -9138,17 +7870,20 @@ install_macosx_stable_post() {
 }
 
 install_macosx_onedir_post() {
+
     install_macosx_stable_post || return 1
     return 0
 }
 
 install_macosx_git_post() {
+
     install_macosx_stable_post || return 1
     return 0
 }
 
 install_macosx_restart_daemons() {
-    [ $_START_DAEMONS -eq $BS_FALSE ] && return
+
+    [ "$_START_DAEMONS" -eq $BS_FALSE ] && return
 
     if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then
       /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1
@@ -9173,6 +7908,7 @@ install_macosx_restart_daemons() {
 #   the -c options is passed.
 #
 config_salt() {
+
     # If the configuration directory is not passed, return
     [ "$_TEMP_CONFIG_DIR" = "null" ] && return
 
@@ -9216,7 +7952,7 @@ config_salt() {
 
             # Check if a minion config file already exists and move to .bak if needed
             if [ -f "$_SALT_ETC_DIR/minion" ] && [ "$CREATE_BAK" -eq "$BS_TRUE" ]; then
-                __movefile "$_SALT_ETC_DIR/minion" "$_SALT_ETC_DIR/minion.bak" $BS_TRUE || return 1
+                __movefile "$_SALT_ETC_DIR/minion" "$_SALT_ETC_DIR/minion.bak" "$BS_TRUE" || return 1
                 CONFIGURED_ANYTHING=$BS_TRUE
             fi
 
@@ -9259,8 +7995,11 @@ config_salt() {
     if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ] && [ "$_CONFIG_ONLY" -eq $BS_TRUE ]; then
         OVERWRITE_MASTER_CONFIGS=$BS_TRUE
     fi
+    if [ "$_INSTALL_SALT_API" -eq $BS_TRUE ] && [ "$_CONFIG_ONLY" -eq $BS_TRUE ]; then
+        OVERWRITE_MASTER_CONFIGS=$BS_TRUE
+    fi
 
-    if [ "$_INSTALL_MASTER" -eq $BS_TRUE ] || [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ] || [ "$OVERWRITE_MASTER_CONFIGS" -eq $BS_TRUE ] || [ "$_CUSTOM_MASTER_CONFIG" != "null" ]; then
+    if [ "$_INSTALL_MASTER" -eq $BS_TRUE ] || [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ] || [ "$_INSTALL_SALT_API" -eq $BS_TRUE ] || [ "$OVERWRITE_MASTER_CONFIGS" -eq $BS_TRUE ] || [ "$_CUSTOM_MASTER_CONFIG" != "null" ]; then
         # Create the PKI directory
         [ -d "$_PKI_DIR/master" ] || (mkdir -p "$_PKI_DIR/master" && chmod 700 "$_PKI_DIR/master") || return 1
 
@@ -9269,7 +8008,7 @@ config_salt() {
 
             # Check if a master config file already exists and move to .bak if needed
             if [ -f "$_SALT_ETC_DIR/master" ] && [ "$CREATE_BAK" -eq "$BS_TRUE" ]; then
-                __movefile "$_SALT_ETC_DIR/master" "$_SALT_ETC_DIR/master.bak" $BS_TRUE || return 1
+                __movefile "$_SALT_ETC_DIR/master" "$_SALT_ETC_DIR/master.bak" "$BS_TRUE" || return 1
                 CONFIGURED_ANYTHING=$BS_TRUE
             fi
 
@@ -9283,7 +8022,7 @@ config_salt() {
             CONFIGURED_ANYTHING=$BS_TRUE
         fi
 
         # Copy the master's keys if found
         if [ -f "$_TEMP_CONFIG_DIR/master.pem" ]; then
             __movefile "$_TEMP_CONFIG_DIR/master.pem" "$_PKI_DIR/master/" || return 1
             chmod 400 "$_PKI_DIR/master/master.pem" || return 1
@@ -9313,7 +8052,7 @@ config_salt() {
         done
     fi
 
-    if [ "$_CONFIG_ONLY" -eq $BS_TRUE ] && [ $CONFIGURED_ANYTHING -eq $BS_FALSE ]; then
+    if [ "$_CONFIG_ONLY" -eq $BS_TRUE ] && [ "$CONFIGURED_ANYTHING" -eq $BS_FALSE ]; then
         echowarn "No configuration or keys were copied over. No configuration was done!"
         exit 0
     fi
@@ -9331,6 +8070,7 @@ config_salt() {
 #   as long as the -k option is passed.
 #
 preseed_master() {
+
     # Create the PKI directory
 
     if [ "$(find "$_TEMP_KEYS_DIR" -maxdepth 1 -type f | wc -l)" -lt 1 ]; then
@@ -9365,6 +8105,7 @@ preseed_master() {
 #   This function checks if all of the installed daemons are running or not.
 #
 daemons_running_onedir() {
+
     [ "$_START_DAEMONS" -eq $BS_FALSE ] && return 0
 
     FAILED_DAEMONS=0
@@ -9402,6 +8143,7 @@ daemons_running_onedir() {
 #   This function checks if all of the installed daemons are running or not.
 #
 daemons_running() {
+
     [ "$_START_DAEMONS" -eq $BS_FALSE ] && return 0
 
     FAILED_DAEMONS=0
@@ -9416,7 +8158,7 @@ daemons_running() {
 
         # shellcheck disable=SC2009
         if [ "${DISTRO_NAME}" = "SmartOS" ]; then
-            if [ "$(svcs -Ho STA salt-$fname)" != "ON" ]; then
+            if [ "$(svcs -Ho STA "salt-$fname")" != "ON" ]; then
                 echoerror "salt-$fname was not found running"
                 FAILED_DAEMONS=$((FAILED_DAEMONS + 1))
             fi
@@ -9426,7 +8168,7 @@ daemons_running() {
         fi
     done
 
-    return $FAILED_DAEMONS
+    return ${FAILED_DAEMONS}
 }
 #
 #  Ended daemons running check function
@@ -9449,9 +8191,10 @@ if [ ${_NO_DEPS} -eq $BS_FALSE ]; then
 fi
 
 DEPS_INSTALL_FUNC="null"
-for FUNC_NAME in $(__strip_duplicates "$DEP_FUNC_NAMES"); do
-    if __function_defined "$FUNC_NAME"; then
-        DEPS_INSTALL_FUNC="$FUNC_NAME"
+# shellcheck disable=SC2086
+for FUNC_NAME in $(__strip_duplicates ${DEP_FUNC_NAMES}); do
+    if __function_defined ${FUNC_NAME}; then
+        DEPS_INSTALL_FUNC=${FUNC_NAME}
         break
     fi
 done
@@ -9561,7 +8304,7 @@ for FUNC_NAME in $(__strip_duplicates "$DAEMONS_RUNNING_FUNC_NAMES"); do
 done
 echodebug "DAEMONS_RUNNING_FUNC=${DAEMONS_RUNNING_FUNC}"
 
 # Let's get the check services function
 if [ ${_DISABLE_SALT_CHECKS} -eq $BS_FALSE ]; then
     CHECK_SERVICES_FUNC_NAMES="install_${DISTRO_NAME_L}${PREFIXED_DISTRO_MAJOR_VERSION}_${ITYPE}_check_services"
     CHECK_SERVICES_FUNC_NAMES="$CHECK_SERVICES_FUNC_NAMES install_${DISTRO_NAME_L}${PREFIXED_DISTRO_MAJOR_VERSION}${PREFIXED_DISTRO_MINOR_VERSION}_${ITYPE}_check_services"
@@ -9594,7 +8337,7 @@ fi
 
 
 # Install dependencies
-if [ ${_NO_DEPS} -eq $BS_FALSE ] && [ $_CONFIG_ONLY -eq $BS_FALSE ]; then
+if [ "${_NO_DEPS}" -eq $BS_FALSE ] && [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then
     # Only execute the function if not in config-only mode
     echoinfo "Running ${DEPS_INSTALL_FUNC}()"
     if ! ${DEPS_INSTALL_FUNC}; then
@@ -9605,6 +8348,7 @@ fi
 
 
 if [ "${ITYPE}" = "git" ] && [ ${_NO_DEPS} -eq ${BS_TRUE} ]; then
+    # shellcheck disable=SC2119
     if ! __git_clone_and_checkout; then
         echo "Failed to clone and checkout git repository."
         exit 1
@@ -9618,7 +8362,7 @@ if [ "$_CUSTOM_MASTER_CONFIG" != "null" ] || [ "$_CUSTOM_MINION_CONFIG" != "null
         _TEMP_CONFIG_DIR="$_SALT_ETC_DIR"
     fi
 
-    if [ ${_NO_DEPS} -eq $BS_FALSE ] && [ $_CONFIG_ONLY -eq $BS_TRUE ]; then
+    if [ "${_NO_DEPS}" -eq $BS_FALSE ] && [ "$_CONFIG_ONLY" -eq $BS_TRUE ]; then
         # Execute function to satisfy dependencies for configuration step
         echoinfo "Running ${DEPS_INSTALL_FUNC}()"
         if ! ${DEPS_INSTALL_FUNC}; then
@@ -9692,6 +8436,7 @@ fi
 if [ "$STARTDAEMONS_INSTALL_FUNC" != "null" ] && [ ${_START_DAEMONS} -eq $BS_TRUE ]; then
     echoinfo "Running ${STARTDAEMONS_INSTALL_FUNC}()"
     echodebug "Waiting ${_SLEEP} seconds for processes to settle before checking for them"
+    # shellcheck disable=SC2086
     sleep ${_SLEEP}
     if ! ${STARTDAEMONS_INSTALL_FUNC}; then
         echoerror "Failed to run ${STARTDAEMONS_INSTALL_FUNC}()!!!"
@@ -9703,6 +8448,7 @@ fi
 if [ "$DAEMONS_RUNNING_FUNC" != "null" ] && [ ${_START_DAEMONS} -eq $BS_TRUE ]; then
     echoinfo "Running ${DAEMONS_RUNNING_FUNC}()"
     echodebug "Waiting ${_SLEEP} seconds for processes to settle before checking for them"
+    # shellcheck disable=SC2086
     sleep ${_SLEEP}  # Sleep a little bit to let daemons start
     if ! ${DAEMONS_RUNNING_FUNC}; then
         echoerror "Failed to run ${DAEMONS_RUNNING_FUNC}()!!!"
@@ -9725,7 +8471,7 @@ if [ "$DAEMONS_RUNNING_FUNC" != "null" ] && [ ${_START_DAEMONS} -eq $BS_TRUE ];
 
             echodebug "Running salt-$fname by hand outputs: $(nohup salt-$fname -l debug)"
 
-            [ ! -f /var/log/salt/$fname ] && echodebug "/var/log/salt/$fname does not exist. Can't cat its contents!" && continue
+            [ ! -f "/var/log/salt/$fname" ] && echodebug "/var/log/salt/$fname does not exist. Can't cat its contents!" && continue
 
             echodebug "DAEMON LOGS for $fname:"
             echodebug "$(cat /var/log/salt/$fname)"
diff --git a/salt/cloud/deploy/curl-bootstrap-git.sh b/salt/cloud/deploy/curl-bootstrap-git.sh
index e02293d06e2..b0a23198e8d 100644
--- a/salt/cloud/deploy/curl-bootstrap-git.sh
+++ b/salt/cloud/deploy/curl-bootstrap-git.sh
@@ -7,11 +7,11 @@
 #
 # It has been designed as an example, to be customized for your own needs.
 
-curl -L https://bootstrap.saltstack.com | sudo sh -s -- "$@" git develop
+curl -L https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s -- "$@" git develop
 
 # By default, Salt Cloud now places the minion's keys and configuration in
 # /tmp/.saltcloud/ before executing the deploy script. After it has executed,
 # these temporary files are removed. If you don't want salt-bootstrap to handle
 # these files, comment out the above command, and uncomment the below command.
 
-#curl -L https://bootstrap.saltstack.com | sudo sh -s git develop
+#curl -L https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s git develop
diff --git a/salt/cloud/deploy/curl-bootstrap.sh b/salt/cloud/deploy/curl-bootstrap.sh
index b13fdf43b5f..d3e9466b011 100644
--- a/salt/cloud/deploy/curl-bootstrap.sh
+++ b/salt/cloud/deploy/curl-bootstrap.sh
@@ -7,11 +7,11 @@
 #
 # It has been designed as an example, to be customized for your own needs.
 
-curl -L https://bootstrap.saltstack.com | sudo sh -s -- "$@"
+curl -L https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s -- "$@"
 
 # By default, Salt Cloud now places the minion's keys and configuration in
 # /tmp/.saltcloud/ before executing the deploy script. After it has executed,
 # these temporary files are removed. If you don't want salt-bootstrap to handle
 # these files, comment out the above command, and uncomment the below command.
 
-#curl -L https://bootstrap.saltstack.com | sudo sh
+#curl -L https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh
diff --git a/salt/cloud/deploy/python-bootstrap.sh b/salt/cloud/deploy/python-bootstrap.sh
index eb4c83be092..3aa172224e0 100644
--- a/salt/cloud/deploy/python-bootstrap.sh
+++ b/salt/cloud/deploy/python-bootstrap.sh
@@ -7,11 +7,11 @@
 #
 # It has been designed as an example, to be customized for your own needs.
 
-python -c 'import urllib; print urllib.urlopen("https://bootstrap.saltstack.com").read()' | sudo sh -s -- "$@"
+python -c 'import urllib; print urllib.urlopen("https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh").read()' | sudo sh -s -- "$@"
 
 # By default, Salt Cloud now places the minion's keys and configuration in
 # /tmp/.saltcloud/ before executing the deploy script. After it has executed,
 # these temporary files are removed. If you don't want salt-bootstrap to handle
 # these files, comment out the above command, and uncomment the below command.
 
-#python -c 'import urllib; print urllib.urlopen("https://bootstrap.saltstack.com").read()' | sudo sh
+#python -c 'import urllib; print urllib.urlopen("https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh").read()' | sudo sh
diff --git a/salt/cloud/deploy/wget-bootstrap-nocert.sh b/salt/cloud/deploy/wget-bootstrap-nocert.sh
index 68f6d86c311..d21adb32a46 100644
--- a/salt/cloud/deploy/wget-bootstrap-nocert.sh
+++ b/salt/cloud/deploy/wget-bootstrap-nocert.sh
@@ -7,11 +7,11 @@
 #
 # It has been designed as an example, to be customized for your own needs.
 
-wget --no-check-certificate -O - https://bootstrap.saltstack.com | sudo sh -s -- "$@"
+wget --no-check-certificate -O - https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s -- "$@"
 
 # By default, Salt Cloud now places the minion's keys and configuration in
 # /tmp/.saltcloud/ before executing the deploy script. After it has executed,
 # these temporary files are removed. If you don't want salt-bootstrap to handle
 # these files, comment out the above command, and uncomment the below command.
 
-#wget --no-check-certificate -O - https://bootstrap.saltstack.com | sudo sh
+#wget --no-check-certificate -O - https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh
diff --git a/salt/cloud/deploy/wget-bootstrap.sh b/salt/cloud/deploy/wget-bootstrap.sh
index bea8330318c..1c7d41d4375 100644
--- a/salt/cloud/deploy/wget-bootstrap.sh
+++ b/salt/cloud/deploy/wget-bootstrap.sh
@@ -7,11 +7,11 @@
 #
 # It has been designed as an example, to be customized for your own needs.
 
-wget -O - https://bootstrap.saltstack.com | sudo sh -s -- "$@"
+wget -O - https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s -- "$@"
 
 # By default, Salt Cloud now places the minion's keys and configuration in
 # /tmp/.saltcloud/ before executing the deploy script. After it has executed,
 # these temporary files are removed. If you don't want salt-bootstrap to handle
 # these files, comment out the above command, and uncomment the below command.
 
-#wget -O - https://bootstrap.saltstack.com | sudo sh
+#wget -O - https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index dba7f316801..511fef0a9e5 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -2519,7 +2519,7 @@ def syndic_config(
                 ),
             )
         ),
-        "user": opts.get("syndic_user", opts["user"]),
+        "user": opts.get("syndic_user", master_opts["user"]),
         "sock_dir": os.path.join(
             opts["cachedir"], opts.get("syndic_sock_dir", opts["sock_dir"])
         ),
@@ -2527,6 +2527,7 @@ def syndic_config(
         "cachedir": master_opts["cachedir"],
     }
     opts.update(syndic_opts)
+
     # Prepend root_dir to other paths
     prepend_root_dirs = [
         "pki_dir",
diff --git a/salt/fileserver/__init__.py b/salt/fileserver/__init__.py
index fe4b3b8e496..b71af97b12b 100644
--- a/salt/fileserver/__init__.py
+++ b/salt/fileserver/__init__.py
@@ -11,7 +11,6 @@ import time
 from collections.abc import Sequence
 
 import salt.loader
-import salt.utils.data
 import salt.utils.files
 import salt.utils.path
 import salt.utils.url
@@ -147,13 +146,7 @@ def check_file_list_cache(opts, form, list_cache, w_lock):
                             opts.get("fileserver_list_cache_time", 20),
                             list_cache,
                         )
-                        return (
-                            salt.utils.data.decode(
-                                salt.payload.load(fp_).get(form, [])
-                            ),
-                            False,
-                            False,
-                        )
+                        return salt.payload.load(fp_).get(form, []), False, False
                 elif _lock_cache(w_lock):
                     # Set the w_lock and go
                     refresh_cache = True
@@ -189,7 +182,7 @@ def check_env_cache(opts, env_cache):
     try:
         with salt.utils.files.fopen(env_cache, "rb") as fp_:
             log.trace("Returning env cache data from %s", env_cache)
-            return salt.utils.data.decode(salt.payload.load(fp_))
+            return salt.payload.load(fp_)
     except OSError:
         pass
     return None
diff --git a/salt/fileserver/roots.py b/salt/fileserver/roots.py
index e81f37dcf02..cb27396b979 100644
--- a/salt/fileserver/roots.py
+++ b/salt/fileserver/roots.py
@@ -325,7 +325,7 @@ def file_hash(load, fnd):
 
 def _file_lists(load, form):
     """
-    Return a dict containing the file lists for files, dirs, emtydirs and symlinks
+    Return a dict containing the file lists for files, dirs, empty dirs and symlinks
     """
     if "env" in load:
         # "env" is not supported; Use "saltenv".
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 7afcbd5cbae..cc9b29aa0ce 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -287,7 +287,12 @@ def _linux_gpu_data():
         "matrox",
         "aspeed",
     ]
-    gpu_classes = ("vga compatible controller", "3d controller", "display controller")
+    gpu_classes = (
+        "3d controller",
+        "display controller",
+        "processing accelerators",
+        "vga compatible controller",
+    )
 
     devs = []
     try:
@@ -1276,6 +1281,7 @@ def _virtual(osdata):
             "cannot execute it. Grains output might not be "
             "accurate.",
             command,
+            once=True,
         )
     return grains
 
@@ -1870,6 +1876,7 @@ _OS_FAMILY_MAP = {
     "SLES_SAP": "Suse",
     "Arch ARM": "Arch",
     "Manjaro": "Arch",
+    "Manjaro ARM": "Arch",
     "Antergos": "Arch",
     "EndeavourOS": "Arch",
     "ALT": "RedHat",
@@ -2518,10 +2525,31 @@ def _systemd():
     """
     Return the systemd grain
     """
-    systemd_info = __salt__["cmd.run"]("systemctl --version").splitlines()
+    systemd_version = "UNDEFINED"
+    systemd_features = ""
+    try:
+        systemd_output = __salt__["cmd.run_all"]("systemctl --version")
+    except Exception:  # pylint: disable=broad-except
+        log.error("Exception while executing `systemctl --version`", exc_info=True)
+        return {
+            "version": systemd_version,
+            "features": systemd_features,
+        }
+    if systemd_output.get("retcode") == 0:
+        systemd_info = systemd_output.get("stdout", "").splitlines()
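+        # Expected output: first line "systemd <version>", second line the
+        # compile-time feature flags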
+        try:
+            if systemd_info[0].startswith("systemd "):
+                systemd_version = systemd_info[0].split()[1]
+                systemd_features = systemd_info[1]
+        except IndexError:
+            pass
+    if systemd_version == "UNDEFINED" or systemd_features == "":
+        log.error(
+            "Unexpected output returned by `systemctl --version`: %s", systemd_output
+        )
     return {
-        "version": systemd_info[0].split()[1],
-        "features": systemd_info[1],
+        "version": systemd_version,
+        "features": systemd_features,
     }
 
 
diff --git a/salt/grains/disks.py b/salt/grains/disks.py
index f61ad6d6b34..7a3f0482770 100644
--- a/salt/grains/disks.py
+++ b/salt/grains/disks.py
@@ -16,6 +16,7 @@ import salt.utils.platform
 __salt__ = {
     "cmd.run": salt.modules.cmdmod._run_quiet,
     "cmd.run_all": salt.modules.cmdmod._run_all_quiet,
+    "cmd.powershell": salt.modules.cmdmod.powershell,
 }
 
 log = logging.getLogger(__name__)
@@ -153,41 +154,28 @@ def _linux_disks():
 
 
 def _windows_disks():
-    wmic = salt.utils.path.which("wmic")
-
-    namespace = r"\\root\microsoft\windows\storage"
-    path = "MSFT_PhysicalDisk"
-    get = "DeviceID,MediaType"
 
+    cmd = "Get-PhysicalDisk | Select DeviceID, MediaType"
     ret = {"disks": [], "ssds": []}
 
-    cmdret = __salt__["cmd.run_all"](
-        "{} /namespace:{} path {} get {} /format:table".format(
-            wmic, namespace, path, get
-        )
-    )
+    drive_info = __salt__["cmd.powershell"](cmd)
 
-    if cmdret["retcode"] != 0:
-        log.trace("Disk grain does not support this version of Windows")
-    else:
-        for line in cmdret["stdout"].splitlines():
-            info = line.split()
-            if len(info) != 2 or not info[0].isdigit() or not info[1].isdigit():
-                continue
-            device = rf"\\.\PhysicalDrive{info[0]}"
-            mediatype = info[1]
-            if mediatype == "3":
-                log.trace("Device %s reports itself as an HDD", device)
-                ret["disks"].append(device)
-            elif mediatype == "4":
-                log.trace("Device %s reports itself as an SSD", device)
-                ret["ssds"].append(device)
-                ret["disks"].append(device)
-            elif mediatype == "5":
-                log.trace("Device %s reports itself as an SCM", device)
-                ret["disks"].append(device)
-            else:
-                log.trace("Device %s reports itself as Unspecified", device)
-                ret["disks"].append(device)
+    if not drive_info:
+        log.trace("No physical discs found")
+        return ret
+
+    # We need a list of dict
+    if isinstance(drive_info, dict):
+        drive_info = [drive_info]
+
+    for drive in drive_info:
+        # Make sure we have a valid drive type
+        if drive["MediaType"].lower() not in ["hdd", "ssd", "scm", "unspecified"]:
+            log.trace("Unknown media type: %s", drive["MediaType"])
+            continue
+        device = rf'\\.\PhysicalDrive{drive["DeviceID"]}'
+        ret["disks"].append(device)
+        if drive["MediaType"].lower() == "ssd":
+            ret["ssds"].append(device)
 
     return ret
diff --git a/salt/grains/extra.py b/salt/grains/extra.py
index 3947590e2b6..0d2fa18aa6d 100644
--- a/salt/grains/extra.py
+++ b/salt/grains/extra.py
@@ -67,7 +67,10 @@ def config():
 def __secure_boot(efivars_dir):
     """Detect if secure-boot is enabled."""
     enabled = False
-    sboot = glob.glob(os.path.join(efivars_dir, "SecureBoot-*/data"))
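+    # efivarfs exposes each variable as a single file, while the legacy
+    # vars interface keeps the payload in a separate "data" file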
+    if "efivars" == os.path.basename(efivars_dir):
+        sboot = glob.glob(os.path.join(efivars_dir, "SecureBoot-*"))
+    else:
+        sboot = glob.glob(os.path.join(efivars_dir, "SecureBoot-*/data"))
     if len(sboot) == 1:
         # The minion is usually running as a privileged user, but is
         # not the case for the master.  Seems that the master can also
@@ -80,6 +83,17 @@ def __secure_boot(efivars_dir):
     return enabled
 
 
+def get_secure_boot_path():
+    """
+    Provide paths for secure boot directories and files
+    """
+    efivars_path = next(
+        filter(os.path.exists, ["/sys/firmware/efi/efivars", "/sys/firmware/efi/vars"]),
+        None,
+    )
+    return efivars_path
+
+
 def uefi():
     """Populate UEFI grains."""
     if salt.utils.platform.is_freebsd():
@@ -90,18 +104,11 @@ def uefi():
             "efi-secure-boot": False,
         }
     else:
-        # Works on Linux and Apple ?
-        efivars_dir = next(
-            filter(
-                os.path.exists, ["/sys/firmware/efi/efivars", "/sys/firmware/efi/vars"]
-            ),
-            None,
-        )
+        efivars_dir = get_secure_boot_path()
         grains = {
             "efi": bool(efivars_dir),
             "efi-secure-boot": __secure_boot(efivars_dir) if efivars_dir else False,
         }
-
     return grains
 
 
diff --git a/salt/grains/opts.py b/salt/grains/opts.py
index c014f484bcb..3d63d41b7e9 100644
--- a/salt/grains/opts.py
+++ b/salt/grains/opts.py
@@ -11,5 +11,5 @@ def opts():
     if __opts__.get("grain_opts", False) or (
         isinstance(__pillar__, dict) and __pillar__.get("grain_opts", False)
     ):
-        return __opts__
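+        # Nest the options under an "opts" key instead of merging them into
+        # the top level of the grains dictionary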
+        return {"opts": __opts__}
     return {}
diff --git a/salt/loader/context.py b/salt/loader/context.py
index 88a6472a8f3..38d0093a8ba 100644
--- a/salt/loader/context.py
+++ b/salt/loader/context.py
@@ -43,6 +43,9 @@ class NamedLoaderContext(collections.abc.MutableMapping):
         self.loader_context = loader_context
         self.default = default
 
+    def with_default(self, default):
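+        """
+        Return a copy of this named context with a different default value.
+        """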
+        return NamedLoaderContext(self.name, self.loader_context, default=default)
+
     def loader(self):
         """
         The LazyLoader in the current context. This will return None if there
@@ -68,10 +71,12 @@ class NamedLoaderContext(collections.abc.MutableMapping):
         loader = self.loader()
         if loader is None:
             return self.default
-        if self.name == "__context__":
-            return loader.pack[self.name]
         if self.name == loader.pack_self:
             return loader
+        elif self.name == "__context__":
+            return loader.pack[self.name]
+        elif self.name == "__opts__":
+            return loader.pack[self.name]
         try:
             return loader.pack[self.name]
         except KeyError:
diff --git a/salt/loader/dunder.py b/salt/loader/dunder.py
index d3027098b5a..3b198b1497f 100644
--- a/salt/loader/dunder.py
+++ b/salt/loader/dunder.py
@@ -8,3 +8,7 @@ loader_context = salt.loader.context.LoaderContext()
 
 
 __file_client__ = loader_context.named_context("__file_client__", default=None)
+__opts__ = loader_context.named_context("__opts__")
+__context__ = loader_context.named_context("__context__")
+__pillar__ = loader_context.named_context("__pillar__")
+__grains__ = loader_context.named_context("__grains__")
diff --git a/salt/master.py b/salt/master.py
index 9c80fd24284..8ee892999e9 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -814,6 +814,10 @@ class Master(SMaster):
             for _, opts in iter_transport_opts(self.opts):
                 chan = salt.channel.server.PubServerChannel.factory(opts)
                 chan.pre_fork(self.process_manager, kwargs={"secrets": SMaster.secrets})
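+                # Fail fast if the publish server does not signal readiness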
+                if not chan.transport.started.wait(60):
+                    raise salt.exceptions.SaltMasterError(
+                        "Publish server did not start within 60 seconds. Something went wrong.",
+                    )
                 pub_channels.append(chan)
 
             log.info("Creating master event publisher process")
@@ -821,6 +825,10 @@ class Master(SMaster):
                 self.opts
             )
             ipc_publisher.pre_fork(self.process_manager)
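+            # Likewise, fail fast if the IPC event publisher never starts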
+            if not ipc_publisher.transport.started.wait(30):
+                raise salt.exceptions.SaltMasterError(
+                    "IPC publish server did not start within 30 seconds. Something went wrong."
+                )
             self.process_manager.add_process(
                 EventMonitor,
                 args=[self.opts, ipc_publisher],
@@ -1109,8 +1117,8 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
         Create a salt master worker process
 
         :param dict opts: The salt options
-        :param dict mkey: The user running the salt master and the AES key
-        :param dict key: The user running the salt master and the RSA key
+        :param dict mkey: The user running the salt master and the RSA key
+        :param dict key: The user running the salt master and the AES key
 
         :rtype: MWorker
         :return: Master worker
diff --git a/salt/minion.py b/salt/minion.py
index d2cf7c7fb96..ce19e12ffc3 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -17,6 +17,7 @@ import threading
 import time
 import traceback
 import types
+import uuid
 
 import tornado
 import tornado.gen
@@ -280,6 +281,7 @@ def get_proc_dir(cachedir, **kwargs):
          made. Same applies if the directory is already owned by this
          gid. Must be int. Works only on unix/unix like systems.
     """
+    # pylint: disable=logging-fstring-interpolation
     fn_ = os.path.join(cachedir, "proc")
     mode = kwargs.pop("mode", None)
 
@@ -305,11 +307,13 @@ def get_proc_dir(cachedir, **kwargs):
         uid = kwargs.pop("uid", -1)
         gid = kwargs.pop("gid", -1)
 
+        # pylint: disable=logging-fstring-interpolation
         # if uid and gid are both -1 then go ahead with
         # no changes at all
         if (d_stat.st_uid != uid or d_stat.st_gid != gid) and [
             i for i in (uid, gid) if i != -1
         ]:
+            # pylint: disable=logging-fstring-interpolation
             os.chown(fn_, uid, gid)
 
     return fn_
@@ -1039,7 +1043,6 @@ class MinionManager(MinionBase):
         self.max_auth_wait = self.opts["acceptance_wait_time_max"]
         self.minions = []
         self.jid_queue = []
-
         self.io_loop = tornado.ioloop.IOLoop.current()
         self.process_manager = ProcessManager(name="MultiMinionProcessManager")
         self.io_loop.spawn_callback(
@@ -1066,10 +1069,11 @@ class MinionManager(MinionBase):
         self.event.subscribe("")
         self.event.set_event_handler(self.handle_event)
 
-    @tornado.gen.coroutine
-    def handle_event(self, package):
-        for minion in self.minions:
-            minion.handle_event(package)
+    async def handle_event(self, package):
+        try:
+            await asyncio.gather(*[_.handle_event(package) for _ in self.minions])
+        except Exception as exc:  # pylint: disable=broad-except
+            log.error("Error dispatching event. %s", exc)
 
     def _create_minion_object(
         self,
@@ -1393,13 +1397,8 @@ class Minion(MinionBase):
         self.req_channel = salt.channel.client.AsyncReqChannel.factory(
             self.opts, io_loop=self.io_loop
         )
-
-        if hasattr(
-            self.req_channel, "connect"
-        ):  # TODO: consider generalizing this for all channels
-            log.debug("Connecting minion's long-running req channel")
-            yield self.req_channel.connect()
-
+        log.debug("Connecting minion's long-running req channel")
+        yield self.req_channel.connect()
         yield self._post_master_init(master)
 
     @tornado.gen.coroutine
@@ -1622,6 +1621,7 @@ class Minion(MinionBase):
         return functions, returners, errors, executors
 
     def _send_req_sync(self, load, timeout):
+        # XXX: Signing should happen in RequestChannel to be fixed in 3008
         if self.opts["minion_sign_messages"]:
             log.trace("Signing event to be published onto the bus.")
             minion_privkey_path = os.path.join(self.opts["pki_dir"], "minion.pem")
@@ -1629,18 +1629,28 @@ class Minion(MinionBase):
                 minion_privkey_path, salt.serializers.msgpack.serialize(load)
             )
             load["sig"] = sig
-
-        with salt.utils.event.get_event(
-            "minion", opts=self.opts, listen=False
-        ) as event:
-            return event.fire_event(
+        with salt.utils.event.get_event("minion", opts=self.opts, listen=True) as event:
+            request_id = str(uuid.uuid4())
+            log.trace("Send request to main id=%s", request_id)
+            event.fire_event(
                 load,
-                f"__master_req_channel_payload/{self.opts['master']}",
+                f"__master_req_channel_payload/{request_id}/{self.opts['master']}",
                 timeout=timeout,
             )
+            ret = event.get_event(
+                tag=f"__master_req_channel_return/{request_id}",
+                wait=timeout,
+            )
+            log.trace("Reply from main %s", request_id)
+            if ret is None:
+                log.error("Timeout waiting for response")
+                return
+            return ret["ret"]
 
     @tornado.gen.coroutine
     def _send_req_async(self, load, timeout):
+        # XXX: Signing should happen in RequestChannel to be fixed in 3008
+        # XXX: This is only used by syndic
         if self.opts["minion_sign_messages"]:
             log.trace("Signing event to be published onto the bus.")
             minion_privkey_path = os.path.join(self.opts["pki_dir"], "minion.pem")
@@ -1648,31 +1658,49 @@ class Minion(MinionBase):
                 minion_privkey_path, salt.serializers.msgpack.serialize(load)
             )
             load["sig"] = sig
-
-        with salt.utils.event.get_event(
-            "minion", opts=self.opts, listen=False
-        ) as event:
-            ret = yield event.fire_event_async(
+        with salt.utils.event.get_event("minion", opts=self.opts, listen=True) as event:
+            request_id = str(uuid.uuid4())
+            log.trace("Send request to main id=%s", request_id)
+            yield event.fire_event_async(
                 load,
-                f"__master_req_channel_payload/{self.opts['master']}",
+                f"__master_req_channel_payload/{request_id}/{self.opts['master']}",
                 timeout=timeout,
             )
-            raise tornado.gen.Return(ret)
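+            # Poll for the reply event, sleeping briefly between checks,
+            # until the timeout elapses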
+            start = time.time()
+            while time.time() - start < timeout:
+                ret = event.get_event(
+                    tag=f"__master_req_channel_return/{request_id}", no_block=True
+                )
+                if ret:
+                    break
+                yield tornado.gen.sleep(0.3)
+            else:
+                raise TimeoutError("Did not recieve return event")
+            log.trace("Reply from main %s", request_id)
+            raise tornado.gen.Return(ret["ret"])
 
-    def _fire_master(
-        self,
-        data=None,
-        tag=None,
-        events=None,
-        pretag=None,
-        timeout=60,
-        sync=True,
-        timeout_handler=None,
-        include_startup_grains=False,
+    @tornado.gen.coroutine
+    def _send_req_async_main(self, load, timeout):
+        """
+        Send a request to the master's request server. To be called from the
+        top level process in the main thread only. Worker threads and
+        processes should call _send_req_sync or _send_req_async as necessary.
+        """
+        if self.opts["minion_sign_messages"]:
+            log.trace("Signing event to be published onto the bus.")
+            minion_privkey_path = os.path.join(self.opts["pki_dir"], "minion.pem")
+            sig = salt.crypt.sign_message(
+                minion_privkey_path, salt.serializers.msgpack.serialize(load)
+            )
+            load["sig"] = sig
+        ret = yield self.req_channel.send(
+            load, timeout=timeout, tries=self.opts["return_retry_tries"]
+        )
+        raise tornado.gen.Return(ret)
+
+    def _fire_master_prepare(
+        self, data, tag, events, pretag, include_startup_grains=False
     ):
-        """
-        Fire an event on the master, or drop message if unable to send.
-        """
         load = {
             "id": self.opts["id"],
             "cmd": "_minion_event",
@@ -1697,34 +1725,62 @@ class Minion(MinionBase):
                 if k in self.opts["start_event_grains"]
             }
             load["grains"] = grains_to_add
+        return load
 
-        if sync:
-            try:
-                self._send_req_sync(load, timeout)
-            except salt.exceptions.SaltReqTimeoutError:
+    @tornado.gen.coroutine
+    def _fire_master_main(
+        self,
+        data=None,
+        tag=None,
+        events=None,
+        pretag=None,
+        timeout=60,
+        timeout_handler=None,
+        include_startup_grains=False,
+    ):
+        load = self._fire_master_prepare(
+            data, tag, events, pretag, include_startup_grains
+        )
+        if timeout_handler is None:
+
+            def handle_timeout(*_):
                 log.info(
-                    "fire_master failed: master could not be contacted. Request timed"
-                    " out."
+                    "fire_master failed: master could not be contacted. Request"
+                    " timed out."
                 )
-                return False
-            except Exception:  # pylint: disable=broad-except
-                log.info("fire_master failed: %s", traceback.format_exc())
-                return False
-        else:
-            if timeout_handler is None:
+                return True
 
-                def handle_timeout(*_):
-                    log.info(
-                        "fire_master failed: master could not be contacted. Request"
-                        " timed out."
-                    )
-                    return True
+            timeout_handler = handle_timeout
 
-                timeout_handler = handle_timeout
+        yield self._send_req_async_main(load, timeout)
 
-            # pylint: disable=unexpected-keyword-arg
-            self._send_req_async(load, timeout)
-            # pylint: enable=unexpected-keyword-arg
+    def _fire_master(
+        self,
+        data=None,
+        tag=None,
+        events=None,
+        pretag=None,
+        timeout=60,
+        timeout_handler=None,
+        include_startup_grains=False,
+    ):
+        """
+        Fire an event on the master, or drop message if unable to send.
+        """
+        load = self._fire_master_prepare(
+            data, tag, events, pretag, include_startup_grains
+        )
+        try:
+            self._send_req_sync(load, timeout)
+        except salt.exceptions.SaltReqTimeoutError:
+            log.info(
+                "fire_master failed: master could not be contacted. Request timed"
+                " out."
+            )
+            return False
+        except Exception:  # pylint: disable=broad-except
+            log.info("fire_master failed: %s", traceback.format_exc())
+            return False
         return True
 
     async def _handle_decoded_payload(self, data):
@@ -2222,10 +2278,7 @@ class Minion(MinionBase):
                 except Exception as exc:  # pylint: disable=broad-except
                     log.error("The return failed for job %s: %s", data["jid"], exc)
 
-    def _return_pub(self, ret, ret_cmd="_return", timeout=60, sync=True):
-        """
-        Return the data from the executed command to the master server
-        """
+    def _prepare_return_pub(self, ret, ret_cmd="_return"):
         jid = ret.get("jid", ret.get("__jid__"))
         fun = ret.get("fun", ret.get("__fun__"))
         if self.opts["multiprocessing"]:
@@ -2279,7 +2332,12 @@ class Minion(MinionBase):
             if ret["jid"] == "req":
                 ret["jid"] = salt.utils.jid.gen_jid(self.opts)
             salt.utils.minion.cache_jobs(self.opts, ret["jid"], ret)
+        return load
 
+    @tornado.gen.coroutine
+    def _return_pub_main(self, ret, ret_cmd="_return", timeout=60):
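+        """
+        Return the data from the executed command to the master. To be
+        called only from the main thread of the top level process.
+        """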
+        jid = ret.get("jid", ret.get("__jid__"))
+        load = self._prepare_return_pub(ret, ret_cmd)
         if not self.opts["pub_ret"]:
             return ""
 
@@ -2293,20 +2351,38 @@ class Minion(MinionBase):
             )
             return True
 
-        if sync:
-            try:
-                ret_val = self._send_req_sync(load, timeout=timeout)
-            except SaltReqTimeoutError:
-                timeout_handler()
-                return ""
-        else:
-            # pylint: disable=unexpected-keyword-arg
-            ret_val = self._send_req_async(
-                load,
-                timeout=timeout,
-            )
-            # pylint: enable=unexpected-keyword-arg
+        try:
+            ret_val = yield self._send_req_async_main(load, timeout=timeout)
+        except SaltReqTimeoutError:
+            timeout_handler()
+            ret_val = ""
+        log.trace("ret_val = %s", ret_val)  # pylint: disable=no-member
+        raise tornado.gen.Return(ret_val)
 
+    def _return_pub(self, ret, ret_cmd="_return", timeout=60):
+        """
+        Return the data from the executed command to the master server
+        """
+        jid = ret.get("jid", ret.get("__jid__"))
+        load = self._prepare_return_pub(ret, ret_cmd)
+        if not self.opts["pub_ret"]:
+            return ""
+
+        def timeout_handler(*_):
+            log.warning(
+                "The minion failed to return the job information for job %s. "
+                "This is often due to the master being shut down or "
+                "overloaded. If the master is running, consider increasing "
+                "the worker_threads value.",
+                jid,
+            )
+            return True
+
+        try:
+            ret_val = self._send_req_sync(load, timeout=timeout)
+        except SaltReqTimeoutError:
+            timeout_handler()
+            return ""
         log.trace("ret_val = %s", ret_val)  # pylint: disable=no-member
         return ret_val
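
Both ``_return_pub`` variants share the same timeout fallback: a single send attempt, with ``SaltReqTimeoutError`` downgraded to a warning and an empty return rather than a raised exception. A condensed sketch of that control flow, using a stand-in exception class:

.. code-block:: python

    import logging

    log = logging.getLogger(__name__)

    class ReqTimeout(Exception):      # stands in for SaltReqTimeoutError
        pass

    def return_pub(load, send, timeout=60):
        try:
            ret_val = send(load, timeout=timeout)
        except ReqTimeout:
            log.warning("Failed to return job info for %s", load.get("jid"))
            return ""
        return ret_val

    assert return_pub({"jid": "123"}, lambda load, timeout: "ok") == "ok"
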
 
@@ -2314,6 +2390,9 @@ class Minion(MinionBase):
         """
         Return the data from the executed command to the master server
         """
+        # XXX: This is only used by syndic and should be moved to the Syndic class.
+        # XXX: The sync flag is only ever passed as sync=False, which means
+        # deprecating sync would also let us remove Minion._send_req_async.
         if not isinstance(rets, list):
             rets = [rets]
         jids = {}
@@ -2454,13 +2533,13 @@ class Minion(MinionBase):
         # Send an event to the master that the minion is live
         if self.opts["enable_legacy_startup_events"]:
             # Old style event. Defaults to False in 3001 release.
-            self._fire_master(
+            self._fire_master_main(
                 "Minion {} started at {}".format(self.opts["id"], time.asctime()),
                 "minion_start",
                 include_startup_grains=include_grains,
             )
         # send name spaced event
-        self._fire_master(
+        self._fire_master_main(
             "Minion {} started at {}".format(self.opts["id"], time.asctime()),
             tagify([self.opts["id"], "start"], "minion"),
             include_startup_grains=include_grains,
@@ -2744,21 +2823,35 @@ class Minion(MinionBase):
                 notify=data.get("notify", False),
             )
         elif tag.startswith("__master_req_channel_payload"):
-            job_master = tag.rsplit("/", 1)[1]
+            request_id, job_master = tag.rsplit("/", 2)[1:]
             if job_master == self.opts["master"]:
+                ret = None
                 try:
-                    yield _minion.req_channel.send(
+                    ret = yield _minion.req_channel.send(
                         data,
                         timeout=_minion._return_retry_timer(),
                         tries=_minion.opts["return_retry_tries"],
                     )
                 except salt.exceptions.SaltReqTimeoutError:
-                    log.error("Timeout encountered while sending %r request", data)
+                    log.error(
+                        "Timeout encountered while sending %r request. id=%s",
+                        data,
+                        request_id,
+                    )
+                    raise tornado.gen.Return()
+                with salt.utils.event.get_event(
+                    "minion", opts=self.opts, listen=False
+                ) as event:
+                    yield event.fire_event_async(
+                        {"ret": ret},
+                        f"__master_req_channel_return/{request_id}",
+                    )
             else:
-                log.debug(
-                    "Skipping req for other master: cmd=%s master=%s",
+                log.error(
+                    "Skipping req for other master: cmd=%s master=%s id=%s",
                     data["cmd"],
                     job_master,
+                    request_id,
                 )
         elif tag.startswith("pillar_refresh"):
             yield _minion.pillar_refresh(
@@ -2786,13 +2879,25 @@ class Minion(MinionBase):
             self._mine_send(tag, data)
         elif tag.startswith("fire_master"):
             if self.connected:
-                log.debug("Forwarding master event tag=%s", data["tag"])
-                self._fire_master(
+                log.debug(
+                    "Forwarding event %s to master %s",
+                    data["tag"],
+                    self.opts["master"],
+                )
+                yield self._fire_master_main(
                     data["data"],
                     data["tag"],
                     data["events"],
                     data["pretag"],
-                    sync=False,
+                )
+                log.debug(
+                    "Event sent to master %s %s", data["tag"], self.opts["master"]
+                )
+            else:
+                log.debug(
+                    "Master %s is not connected, dropping event %s",
+                    self.opts["master"],
+                    data["tag"],
                 )
         elif tag.startswith(master_event(type="disconnected")) or tag.startswith(
             master_event(type="failback")
@@ -2860,6 +2965,7 @@ class Minion(MinionBase):
                     self.req_channel = salt.channel.client.AsyncReqChannel.factory(
                         self.opts, io_loop=self.io_loop
                     )
+                    yield self.req_channel.connect()
 
                     # put the current schedule into the new loaders
                     self.opts["schedule"] = self.schedule.option("schedule")
@@ -2949,11 +3055,11 @@ class Minion(MinionBase):
                             1
                         ],
                     )
-            self._return_pub(data, ret_cmd="_return", sync=False)
+            yield self._return_pub_main(data, ret_cmd="_return")
         elif tag.startswith("_salt_error"):
             if self.connected:
                 log.debug("Forwarding salt error event tag=%s", tag)
-                self._fire_master(data, tag, sync=False)
+                yield self._fire_master_main(data, tag)
         elif tag.startswith("salt/auth/creds"):
             key = tuple(data["key"])
             log.debug(
@@ -2966,7 +3072,7 @@ class Minion(MinionBase):
         elif tag.startswith("__beacons_return"):
             if self.connected:
                 log.debug("Firing beacons to master")
-                self._fire_master(events=data["beacons"])
+                yield self._fire_master_main(events=data["beacons"])
 
     def cleanup_subprocesses(self):
         """
@@ -3164,10 +3270,9 @@ class Minion(MinionBase):
                                     "minion is running under an init system."
                                 )
 
-                    self._fire_master(
+                    self._fire_master_main(
                         "ping",
                         "minion_ping",
-                        sync=False,
                         timeout_handler=ping_timeout_handler,
                     )
                 except Exception:  # pylint: disable=broad-except
@@ -3368,12 +3473,10 @@ class Syndic(Minion):
             self._fire_master(
                 "Syndic {} started at {}".format(self.opts["id"], time.asctime()),
                 "syndic_start",
-                sync=False,
             )
         self._fire_master(
             "Syndic {} started at {}".format(self.opts["id"], time.asctime()),
             tagify([self.opts["id"], "start"], "syndic"),
-            sync=False,
         )
 
     # TODO: clean up docs
@@ -3768,7 +3871,7 @@ class SyndicManager(MinionBase):
                     "events": events,
                     "pretag": tagify(self.opts["id"], base="syndic"),
                     "timeout": self._return_retry_timer(),
-                    "sync": False,
+                    "sync": True,  # Sync needs to be true unless being called from a coroutine
                 },
             )
         if self.delayed:
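
In the minion changes above, ``__master_req_channel_payload`` tags now carry a request id as well as the target master, and the reply is published on a matching ``__master_req_channel_return/<request_id>`` event so a waiting caller can correlate it. A rough sketch of the tag round trip (the id is illustrative):

.. code-block:: python

    tag = "__master_req_channel_payload/7f3a/salt-master-1"
    request_id, job_master = tag.rsplit("/", 2)[1:]
    assert (request_id, job_master) == ("7f3a", "salt-master-1")

    # The event tag a waiting caller would listen for:
    return_tag = f"__master_req_channel_return/{request_id}"
    assert return_tag == "__master_req_channel_return/7f3a"
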
diff --git a/salt/modules/archive.py b/salt/modules/archive.py
index 9651cc406d8..066ed8d3c71 100644
--- a/salt/modules/archive.py
+++ b/salt/modules/archive.py
@@ -834,18 +834,19 @@ def zip_(zip_file, sources, template=None, cwd=None, runas=None, zip64=False):
         if runas:
             os.seteuid(euid)
             os.setegid(egid)
-        if exc is not None:
-            # Wait to raise the exception until euid/egid are restored to avoid
-            # permission errors in writing to minion log.
-            if exc == zipfile.LargeZipFile:
-                raise CommandExecutionError(
-                    "Resulting zip file too large, would require ZIP64 support"
-                    "which has not been enabled. Rerun command with zip64=True"
-                )
-            else:
-                raise CommandExecutionError(
-                    f"Exception encountered creating zipfile: {exc}"
-                )
+        if "exc" in vars() or "exc" in globals():
+            if exc is not None:
+                # Wait to raise the exception until euid/egid are restored to avoid
+                # permission errors in writing to minion log.
+                if exc == zipfile.LargeZipFile:
+                    raise CommandExecutionError(
+                        "Resulting zip file too large, would require ZIP64 support"
+                        "which has not been enabled. Rerun command with zip64=True"
+                    )
+                else:
+                    raise CommandExecutionError(
+                        f"Exception encountered creating zipfile: {exc}"
+                    )
 
     return archived_files
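
The ``"exc" in vars()`` guard protects against ``exc`` never being bound when the ``try`` block fails in a way none of the handlers catch (pre-initializing ``exc = None`` would achieve the same). A self-contained illustration of the failure mode the guard avoids:

.. code-block:: python

    def demo(trigger=False):
        try:
            if trigger:
                raise ValueError("boom")
        except ValueError as err:
            exc = err                     # only bound when this branch runs
        if "exc" in vars():
            print("captured:", exc)
        else:
            print("exc was never bound")  # without the guard: NameError

    demo()        # -> exc was never bound
    demo(True)    # -> captured: boom
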
 
diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py
index c92a4aa4195..fe1d4412d00 100644
--- a/salt/modules/cmdmod.py
+++ b/salt/modules/cmdmod.py
@@ -266,7 +266,7 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd):
     win_shell = salt.utils.path.which(win_shell)
 
     if not win_shell:
-        raise CommandExecutionError("PowerShell binary not found")
+        raise CommandExecutionError(f"PowerShell binary not found: {win_shell}")
 
     new_cmd = [win_shell, "-NonInteractive", "-NoProfile", "-ExecutionPolicy", "Bypass"]
 
@@ -290,7 +290,17 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd):
         # Strip whitespace
         if isinstance(cmd, list):
             cmd = " ".join(cmd)
-        new_cmd.extend(["-Command", f"& {{{cmd.strip()}}}"])
+
+        # Commands that start with specific keywords behave differently: they
+        # fail if a "&" is prepended. Add those keywords here as we find them:
+        keywords = ["$", "&", ".", "Configuration"]
+
+        for keyword in keywords:
+            if cmd.startswith(keyword):
+                new_cmd.extend(["-Command", f"{cmd.strip()}"])
+                break
+        else:
+            new_cmd.extend(["-Command", f"& {cmd.strip()}"])
 
     log.debug(new_cmd)
     return new_cmd
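
The loop above special-cases commands that begin with PowerShell keywords, which reject the call operator. A condensed sketch of the dispatch (the keyword list mirrors the one above; the sample commands are illustrative):

.. code-block:: python

    KEYWORDS = ["$", "&", ".", "Configuration"]

    def wrap_command(cmd):
        for keyword in KEYWORDS:
            if cmd.startswith(keyword):
                return ["-Command", cmd.strip()]  # no leading "&" for keywords
        return ["-Command", f"& {cmd.strip()}"]

    assert wrap_command("Get-Date") == ["-Command", "& Get-Date"]
    assert wrap_command("$PSVersionTable") == ["-Command", "$PSVersionTable"]
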
@@ -2689,11 +2699,15 @@ def script(
 
     :param str args: String of command line args to pass to the script. Only
         used if no args are specified as part of the `name` argument. To pass a
-        string containing spaces in YAML, you will need to doubly-quote it:
+        string containing spaces in YAML, you will need to doubly-quote it.
+        Additionally, if you need to pass falsey values (e.g., "0", "", "False"),
+        you should doubly-quote them to ensure they are correctly interpreted:
 
         .. code-block:: bash
 
             salt myminion cmd.script salt://foo.sh "arg1 'arg two' arg3"
+            salt myminion cmd.script salt://foo.sh "''0''"
+            salt myminion cmd.script salt://foo.sh "''False''"
 
     :param str cwd: The directory from which to execute the command. Defaults
         to the directory returned from Python's tempfile.mkstemp.
@@ -2835,6 +2849,10 @@ def script(
 
       .. versionadded:: 2019.2.0
 
+    :return: The return value of the script execution, including stdout, stderr,
+        and the exit code. If a falsey string value is passed as an argument, it
+        should be doubly-quoted (as shown above) so it is correctly interpreted
+        by Salt.
+
     CLI Example:
 
     .. code-block:: bash
@@ -2919,8 +2937,11 @@ def script(
         os.chmod(path, 320)
         os.chown(path, __salt__["file.user_to_uid"](runas), -1)
 
-    if salt.utils.platform.is_windows() and shell.lower() != "powershell":
-        cmd_path = _cmd_quote(path, escape=False)
+    if salt.utils.platform.is_windows():
+        if shell.lower() not in ["powershell", "pwsh"]:
+            cmd_path = _cmd_quote(path, escape=False)
+        else:
+            cmd_path = path
     else:
         cmd_path = _cmd_quote(path)
 
@@ -4096,6 +4117,7 @@ def powershell(
         cmd = salt.utils.stringutils.to_str(cmd)
         encoded_cmd = True
     else:
+        cmd = f"{{{cmd}}}"
         encoded_cmd = False
 
     # Retrieve the response, while overriding shell with 'powershell'
diff --git a/salt/modules/cp.py b/salt/modules/cp.py
index b698398d42e..b6b43775ff6 100644
--- a/salt/modules/cp.py
+++ b/salt/modules/cp.py
@@ -21,7 +21,13 @@ import salt.utils.path
 import salt.utils.templates
 import salt.utils.url
 from salt.exceptions import CommandExecutionError
-from salt.loader.dunder import __file_client__
+from salt.loader.dunder import (
+    __context__,
+    __file_client__,
+    __grains__,
+    __opts__,
+    __pillar__,
+)
 
 log = logging.getLogger(__name__)
 
@@ -168,7 +174,7 @@ def _client():
     """
     if __file_client__:
         return __file_client__.value()
-    return salt.fileclient.get_file_client(__opts__)
+    return salt.fileclient.get_file_client(__opts__.value())
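
``__opts__`` imported from ``salt.loader.dunder`` is a wrapper object rather than the raw dict, so it must be unwrapped with ``.value()`` before being handed to APIs that expect a plain mapping. A minimal sketch of the wrapper pattern (names and mechanics are illustrative, not Salt's actual implementation):

.. code-block:: python

    class DunderWrapper:
        """Stand-in for a salt.loader.dunder value."""

        def __init__(self, data=None):
            self._data = data

        def __bool__(self):
            return self._data is not None

        def value(self):
            return self._data

    __file_client__ = DunderWrapper()    # unset: falsy
    __opts__ = DunderWrapper({"cachedir": "/var/cache/salt"})

    def _client():
        if __file_client__:
            return __file_client__.value()
        return __opts__.value()          # plain dict for downstream APIs

    assert _client() == {"cachedir": "/var/cache/salt"}
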
 
 
 def _render_filenames(path, dest, saltenv, template, **kw):
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 3f9b5a3e7aa..5f2fb12566c 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -2673,7 +2673,7 @@ def replace(
                 r_data = mmap.mmap(r_file.fileno(), 0, access=mmap.ACCESS_READ)
             except (ValueError, OSError):
                 # size of file in /proc is 0, but contains data
-                r_data = salt.utils.stringutils.to_bytes("".join(r_file))
+                r_data = b"".join(r_file)
             if search_only:
                 # Just search; bail as early as a match is found
                 if re.search(cpattern, r_data):
diff --git a/salt/modules/hashutil.py b/salt/modules/hashutil.py
index 3517f2512ac..8b91ff90ba4 100644
--- a/salt/modules/hashutil.py
+++ b/salt/modules/hashutil.py
@@ -135,7 +135,7 @@ def base64_encodefile(fname):
         path:
           to:
             data: |
-              {{ salt.hashutil.base64_encodefile('/path/to/binary_file') | indent(6) }}
+              {{ salt['hashutil.base64_encodefile']('/path/to/binary_file') | indent(6) }}
 
     The :py:func:`file.decode <salt.states.file.decode>` state function can be
     used to decode this data and write it to disk.
diff --git a/salt/modules/ini_manage.py b/salt/modules/ini_manage.py
index f5aff8881e2..6935cf2c0be 100644
--- a/salt/modules/ini_manage.py
+++ b/salt/modules/ini_manage.py
@@ -37,7 +37,7 @@ COM_REGX = re.compile(r"^\s*(#|;)\s*(.*)")
 INDENTED_REGX = re.compile(r"(\s+)(.*)")
 
 
-def set_option(file_name, sections=None, separator="=", encoding=None):
+def set_option(file_name, sections=None, separator="=", encoding=None, no_spaces=False):
     """
     Edit an ini file, replacing one or more sections. Returns a dictionary
     containing the changes made.
@@ -66,6 +66,18 @@ def set_option(file_name, sections=None, separator="=", encoding=None):
 
             .. versionadded:: 3006.6
 
+        no_spaces (bool):
+            A bool value that specifies that the key/value separator will not
+            be wrapped with spaces, i.e. ``key=value`` instead of
+            ``key = value``. Default is ``False`` (wrap the separator with
+            spaces), which maintains backwards compatibility.
+
+            .. warning::
+                This will affect all key/value pairs in the ini file, not just
+                the specific value being set.
+
+            .. versionadded:: 3006.10
+
     Returns:
         dict: A dictionary representing the changes made to the ini file
 
@@ -88,7 +100,9 @@ def set_option(file_name, sections=None, separator="=", encoding=None):
     """
 
     sections = sections or {}
-    inifile = _Ini.get_ini_file(file_name, separator=separator, encoding=encoding)
+    inifile = _Ini.get_ini_file(
+        file_name, separator=separator, encoding=encoding, no_spaces=no_spaces
+    )
     changes = inifile.update(sections)
     inifile.flush()
     return changes
@@ -388,18 +402,19 @@ def get_ini(file_name, separator="=", encoding=None):
 
 
 class _Section(OrderedDict):
-    def __init__(self, name, inicontents="", separator="=", commenter="#"):
+    def __init__(
+        self, name, inicontents="", separator="=", commenter="#", no_spaces=False
+    ):
         super().__init__(self)
         self.name = name
         self.inicontents = inicontents
         self.sep = separator
         self.com = commenter
+        self.no_spaces = no_spaces
 
         opt_regx_prefix = r"(\s*)(.+?)\s*"
         opt_regx_suffix = r"\s*(.*)\s*"
-        self.opt_regx_str = r"{}(\{}){}".format(
-            opt_regx_prefix, self.sep, opt_regx_suffix
-        )
+        self.opt_regx_str = rf"{opt_regx_prefix}(\{self.sep}){opt_regx_suffix}"
         self.opt_regx = re.compile(self.opt_regx_str)
 
     def refresh(self, inicontents=None):
@@ -475,7 +490,11 @@ class _Section(OrderedDict):
             # Ensure the value is either a _Section or a string
             if isinstance(value, (dict, OrderedDict)):
                 sect = _Section(
-                    name=key, inicontents="", separator=self.sep, commenter=self.com
+                    name=key,
+                    inicontents="",
+                    separator=self.sep,
+                    commenter=self.com,
+                    no_spaces=self.no_spaces,
                 )
                 sect.update(value)
                 value = sect
@@ -507,7 +526,7 @@ class _Section(OrderedDict):
         return changes
 
     def gen_ini(self):
-        yield "{0}[{1}]{0}".format(os.linesep, self.name)
+        yield f"{os.linesep}[{self.name}]{os.linesep}"
         sections_dict = OrderedDict()
         for name, value in self.items():
             # Handle Comment Lines
@@ -517,14 +536,20 @@ class _Section(OrderedDict):
             elif isinstance(value, _Section):
                 sections_dict.update({name: value})
             # Key / Value pairs
-            # Adds spaces between the separator
             else:
-                yield "{}{}{}{}".format(
-                    name,
-                    f" {self.sep} " if self.sep != " " else self.sep,
-                    value,
-                    os.linesep,
-                )
+                # A separator of multiple spaces collapses to a single space
+                if all(c == " " for c in self.sep):
+                    self.sep = " "
+                # Default is to add spaces
+                if self.no_spaces:
+                    if self.sep != " ":
+                        # We only strip whitespace if the delimiter is not a space
+                        self.sep = self.sep.strip()
+                else:
+                    if self.sep != " ":
+                        # We only add spaces if the delimiter itself is not a space
+                        self.sep = f" {self.sep.strip()} "
+                yield f"{name}{self.sep}{value}{os.linesep}"
         for name, value in sections_dict.items():
             yield from value.gen_ini()
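
Taken together with the new ``no_spaces`` flag on ``set_option``, the separator handling above collapses a run of spaces to a single space, strips padding when ``no_spaces`` is set, and otherwise pads the separator with one space per side. The same rules in isolation:

.. code-block:: python

    def normalize_sep(sep, no_spaces):
        if all(c == " " for c in sep):   # a run of spaces collapses to one
            sep = " "
        if sep != " ":
            sep = sep.strip() if no_spaces else f" {sep.strip()} "
        return sep

    assert normalize_sep("=", no_spaces=False) == " = "   # key = value
    assert normalize_sep("=", no_spaces=True) == "="      # key=value
    assert normalize_sep("   ", no_spaces=False) == " "
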
 
@@ -557,15 +582,26 @@ class _Section(OrderedDict):
 
 class _Ini(_Section):
     def __init__(
-        self, name, inicontents="", separator="=", commenter="#", encoding=None
+        self,
+        name,
+        inicontents="",
+        separator="=",
+        commenter="#",
+        encoding=None,
+        no_spaces=False,
     ):
         super().__init__(
-            self, inicontents=inicontents, separator=separator, commenter=commenter
+            self,
+            inicontents=inicontents,
+            separator=separator,
+            commenter=commenter,
+            no_spaces=no_spaces,
         )
         self.name = name
         if encoding is None:
             encoding = __salt_system_encoding__
         self.encoding = encoding
+        self.no_spaces = no_spaces
 
     def refresh(self, inicontents=None):
         if inicontents is None:
@@ -612,7 +648,7 @@ class _Ini(_Section):
                 self.name, "w", encoding=self.encoding
             ) as outfile:
                 ini_gen = self.gen_ini()
-                next(ini_gen)
+                next(ini_gen)  # Skip the initial line holding the file name
                 ini_gen_list = list(ini_gen)
                 # Avoid writing an initial line separator.
                 if ini_gen_list:
@@ -624,8 +660,10 @@ class _Ini(_Section):
             )
 
     @staticmethod
-    def get_ini_file(file_name, separator="=", encoding=None):
-        inifile = _Ini(file_name, separator=separator, encoding=encoding)
+    def get_ini_file(file_name, separator="=", encoding=None, no_spaces=False):
+        inifile = _Ini(
+            file_name, separator=separator, encoding=encoding, no_spaces=no_spaces
+        )
         inifile.refresh()
         return inifile
 
diff --git a/salt/modules/localemod.py b/salt/modules/localemod.py
index e8cd9063ca3..636f6d0db97 100644
--- a/salt/modules/localemod.py
+++ b/salt/modules/localemod.py
@@ -5,6 +5,7 @@ Module for managing locales on POSIX-like systems.
 import logging
 import os
 import re
+import subprocess
 
 import salt.utils.locales
 import salt.utils.path
@@ -67,6 +68,10 @@ def _localectl_status():
     """
     if salt.utils.path.which("localectl") is None:
         raise CommandExecutionError('Unable to find "localectl"')
+    else:
+        proc = subprocess.run(["localectl"], check=False, capture_output=True)
+        if b"Failed to connect to bus: No such file or directory" in proc.stderr:
+            raise CommandExecutionError('Command "localectl" is in a degraded state.')
 
     ret = {}
     locale_ctl_out = (__salt__["cmd.run"]("localectl status") or "").strip()
diff --git a/salt/modules/mac_assistive.py b/salt/modules/mac_assistive.py
index 7a81fe87a5b..c729fb5bd56 100644
--- a/salt/modules/mac_assistive.py
+++ b/salt/modules/mac_assistive.py
@@ -186,6 +186,7 @@ class TccDB:
         self.connection = None
         self.ge_mojave_and_catalina = False
         self.ge_bigsur_and_later = False
+        self.ge_sonoma_and_later = False
 
     def _check_table_digest(self):
         # This logic comes from https://github.com/jacobsalmela/tccutil which is
@@ -201,6 +202,8 @@ class TccDB:
             elif digest in ("3d1c2a0e97", "cef70648de"):
                 # BigSur and later
                 self.ge_bigsur_and_later = True
+            elif digest in ("34abf99d20",):
+                self.ge_sonoma_and_later = True
             else:
                 raise CommandExecutionError(
                     f"TCC Database structure unknown for digest '{digest}'"
@@ -309,10 +312,56 @@ class TccDB:
                 (app_id, client_type, auth_value),
             )
             self.connection.commit()
+        elif self.ge_sonoma_and_later:
+            # CREATE TABLE access (
+            #   service        TEXT        NOT NULL,
+            #   client         TEXT        NOT NULL,
+            #   client_type    INTEGER     NOT NULL,
+            #   auth_value     INTEGER     NOT NULL,
+            #   auth_reason    INTEGER     NOT NULL,
+            #   auth_version   INTEGER     NOT NULL,
+            #   csreq          BLOB,
+            #   policy_id      INTEGER,
+            #   indirect_object_identifier_type    INTEGER,
+            #   indirect_object_identifier         TEXT NOT NULL DEFAULT 'UNUSED',
+            #   indirect_object_code_identity      BLOB,
+            #   flags          INTEGER,
+            #   last_modified  INTEGER     NOT NULL DEFAULT (CAST(strftime('%s','now') AS INTEGER)),
+            #   pid            INTEGER,
+            #   pid_version    INTEGER,
+            #   boot_uuid      TEXT NOT NULL DEFAULT 'UNUSED',
+            #   last_reminded  INTEGER     NOT NULL DEFAULT (CAST(strftime('%s','now') AS INTEGER)),
+            #   PRIMARY KEY (service, client, client_type, indirect_object_identifier),
+            #   FOREIGN KEY (policy_id)
+            self.connection.execute(
+                """
+                    INSERT or REPLACE INTO access VALUES(
+                        'kTCCServiceAccessibility',
+                        ?,
+                        ?,
+                        ?,
+                        4,
+                        1,
+                        NULL,
+                        NULL,
+                        NULL,
+                        'UNUSED',
+                        NULL,
+                        0,
+                        0,
+                        0,
+                        0,
+                        'UNUSED',
+                        ?
+                    )
+                    """,
+                (app_id, client_type, auth_value, time.time()),
+            )
+            self.connection.commit()
         return True
 
     def enabled(self, app_id):
-        if self.ge_bigsur_and_later:
+        if self.ge_bigsur_and_later or self.ge_sonoma_and_later:
             column = "auth_value"
         elif self.ge_mojave_and_catalina:
             column = "allowed"
@@ -328,7 +377,7 @@ class TccDB:
     def enable(self, app_id):
         if not self.installed(app_id):
             return False
-        if self.ge_bigsur_and_later:
+        if self.ge_bigsur_and_later or self.ge_sonoma_and_later:
             column = "auth_value"
         elif self.ge_mojave_and_catalina:
             column = "allowed"
@@ -344,7 +393,7 @@ class TccDB:
     def disable(self, app_id):
         if not self.installed(app_id):
             return False
-        if self.ge_bigsur_and_later:
+        if self.ge_bigsur_and_later or self.ge_sonoma_and_later:
             column = "auth_value"
         elif self.ge_mojave_and_catalina:
             column = "allowed"
diff --git a/salt/modules/match.py b/salt/modules/match.py
index 7c7f6d933ea..e6acf9e1049 100644
--- a/salt/modules/match.py
+++ b/salt/modules/match.py
@@ -405,7 +405,7 @@ def search_by(lookup, tgt_type="compound", minion_id=None):
 
     .. code-block:: jinja
 
-        {% set roles = salt.match.search_by({
+        {% set roles = salt['match.search_by']({
             'web': ['G@os_family:Debian not nodeX'],
             'db': ['L@node2,node3 and G@datacenter:west'],
             'caching': ['node3', 'node4'],
diff --git a/salt/modules/mine.py b/salt/modules/mine.py
index 69bd6fe4492..f57991eb1e8 100644
--- a/salt/modules/mine.py
+++ b/salt/modules/mine.py
@@ -301,7 +301,7 @@ def get(tgt, fun, tgt_type="glob", exclude_minion=False):
 
         .. code-block:: jinja
 
-            {% set minion_ips = salt.saltutil.runner('mine.get',
+            {% set minion_ips = salt['saltutil.runner']('mine.get',
                 tgt='*',
                 fun='network.ip_addrs',
                 tgt_type='glob') %}
diff --git a/salt/modules/pkg_resource.py b/salt/modules/pkg_resource.py
index 88e38b91a41..0cfadb79b10 100644
--- a/salt/modules/pkg_resource.py
+++ b/salt/modules/pkg_resource.py
@@ -321,8 +321,8 @@ def version_compare(ver1, oper, ver2, ignore_epoch=False):
 
     .. code-block:: jinja
 
-        {%- set postfix_version = salt.pkg.version('postfix') %}
-        {%- if postfix_version and salt.pkg_resource.version_compare(postfix_version, '>=', '3.3', ignore_epoch=True) %}
+        {%- set postfix_version = salt['pkg.version']('postfix') %}
+        {%- if postfix_version and salt['pkg_resource.version_compare'](postfix_version, '>=', '3.3', ignore_epoch=True) %}
           {#- do stuff #}
         {%- endif %}
 
diff --git a/salt/modules/saltutil.py b/salt/modules/saltutil.py
index 00344d70083..bd01b82935e 100644
--- a/salt/modules/saltutil.py
+++ b/salt/modules/saltutil.py
@@ -126,8 +126,8 @@ def _sync(form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None):
 def update(version=None):
     """
     Update the salt minion from the URL defined in opts['update_url']
-    VMware, Inc provides the latest builds here:
-    update_url: https://repo.saltproject.io/windows/
+    Broadcom, Inc provides the latest builds here:
+    update_url: https://packages.broadcom.com/artifactory/saltproject-generic/windows/
 
     Be aware that as of 2014-8-11 there's a bug in esky such that only the
     latest version available in the update_url can be downloaded and installed.
diff --git a/salt/modules/selinux.py b/salt/modules/selinux.py
index 21471397ea4..0f78d1be9c6 100644
--- a/salt/modules/selinux.py
+++ b/salt/modules/selinux.py
@@ -490,7 +490,7 @@ def fcontext_get_policy(
         "[[:alpha:] ]+" if filetype is None else filetype_id_to_string(filetype)
     )
     cmd = (
-        "semanage fcontext -l | egrep "
+        "semanage fcontext -l | grep -E "
         + "'^{filespec}{spacer}{filetype}{spacer}{sel_user}:{sel_role}:{sel_type}:{sel_level}{ospacer}$'".format(
             **cmd_kwargs
         )
@@ -616,7 +616,7 @@ def _fcontext_add_or_delete_policy(
     if "add" == action:
         # need to use --modify if context for name file exists, otherwise ValueError
         filespec = re.escape(name)
-        cmd = f"semanage fcontext -l | egrep '{filespec}'"
+        cmd = f"semanage fcontext -l | grep -E '{filespec} '"
         current_entry_text = __salt__["cmd.shell"](cmd, ignore_retcode=True)
         if current_entry_text != "":
             action = "modify"
@@ -762,7 +762,7 @@ def port_get_policy(name, sel_type=None, protocol=None, port=None):
         "port": port,
     }
     cmd = (
-        "semanage port -l | egrep "
+        "semanage port -l | grep -E "
         + "'^{sel_type}{spacer}{protocol}{spacer}((.*)*)[ ]{port}($|,)'".format(
             **cmd_kwargs
         )
diff --git a/salt/modules/slsutil.py b/salt/modules/slsutil.py
index 10d251a4a36..0d0b1601c79 100644
--- a/salt/modules/slsutil.py
+++ b/salt/modules/slsutil.py
@@ -130,18 +130,18 @@ def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs):
     .. code-block:: jinja
 
         #!jinja|yaml
-        {% set apache = salt.grains.filter_by({
+        {% set apache = salt['grains.filter_by']({
             ...normal jinja map file here...
-        }, merge=salt.pillar.get('apache:lookup')) %}
+        }, merge=salt['pillar.get']('apache:lookup')) %}
         {{ apache | yaml() }}
 
     .. code-block:: python
 
         #!py
         def run():
-            apache = __salt__.grains.filter_by({
+            apache = __salt__['grains.filter_by']({
                 ...normal map here but as a python dict...
-            }, merge=__salt__.pillar.get('apache:lookup'))
+            }, merge=__salt__['pillar.get']('apache:lookup'))
             return apache
 
     Regardless of which of the above map files is used, it can be accessed from
@@ -150,7 +150,7 @@ def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs):
 
     .. code-block:: jinja
 
-        {% set apache = salt.slsutil.renderer('map.sls') %}
+        {% set apache = salt['slsutil.renderer']('map.sls') %}
 
     CLI Example:
 
@@ -223,7 +223,7 @@ def serialize(serializer, obj, **mod_kwargs):
 
     .. code-block:: jinja
 
-        {% set json_string = salt.slsutil.serialize('json',
+        {% set json_string = salt['slsutil.serialize']('json',
             {'foo': 'Foo!'}) %}
     """
     kwargs = salt.utils.args.clean_kwargs(**mod_kwargs)
@@ -247,7 +247,7 @@ def deserialize(serializer, stream_or_string, **mod_kwargs):
 
     .. code-block:: jinja
 
-        {% set python_object = salt.slsutil.deserialize('json',
+        {% set python_object = salt['slsutil.deserialize']('json',
             '{"foo": "Foo!"}') %}
     """
     kwargs = salt.utils.args.clean_kwargs(**mod_kwargs)
diff --git a/salt/modules/system.py b/salt/modules/system.py
index c9e3db3f7b5..059c4c26ba8 100644
--- a/salt/modules/system.py
+++ b/salt/modules/system.py
@@ -12,6 +12,7 @@ Support for reboot, shutdown, etc on POSIX-like systems.
     with ``salt`` will work as expected.
 """
 
+import logging
 import os.path
 import re
 from datetime import datetime, timedelta, tzinfo
@@ -22,6 +23,8 @@ import salt.utils.platform
 from salt.exceptions import CommandExecutionError, SaltInvocationError
 from salt.utils.decorators import depends
 
+log = logging.getLogger(__name__)
+
 __virtualname__ = "system"
 
 
@@ -202,10 +205,10 @@ def _swclock_to_hwclock():
     """
     res = __salt__["cmd.run_all"](["hwclock", "--systohc"], python_shell=False)
     if res["retcode"] != 0:
-        msg = "hwclock failed to set hardware clock from software clock: {}".format(
-            res["stderr"]
+        log.warning(
+            "hwclock failed to set hardware clock from software clock: %s",
+            res["stderr"],
         )
-        raise CommandExecutionError(msg)
     return True
 
 
@@ -636,7 +639,7 @@ def get_computer_name():
 
     .. code-block:: bash
 
-        salt '*' network.get_hostname
+        salt '*' system.get_computer_name
     """
     return __salt__["network.get_hostname"]()
 
diff --git a/salt/modules/tls.py b/salt/modules/tls.py
index 2cc3c09119a..f85370cc902 100644
--- a/salt/modules/tls.py
+++ b/salt/modules/tls.py
@@ -1982,7 +1982,7 @@ if __name__ == "__main__":
         L="Centerville",
         O="SaltStack",
         OU=None,
-        emailAddress="test_system@saltstack.org",
+        emailAddress="test_system@saltproject.io",
     )
     create_ca_signed_cert("koji", "test_system")
     create_pkcs12("koji", "test_system", passphrase="test")
diff --git a/salt/modules/virtualenv_mod.py b/salt/modules/virtualenv_mod.py
index 042847fde40..cd52435e6f5 100644
--- a/salt/modules/virtualenv_mod.py
+++ b/salt/modules/virtualenv_mod.py
@@ -27,7 +27,7 @@ KNOWN_BINARY_NAMES = frozenset(
 
 log = logging.getLogger(__name__)
 
-__opts__ = {"venv_bin": salt.utils.path.which_bin(KNOWN_BINARY_NAMES) or "virtualenv"}
+__opts__ = {"venv_bin": salt.utils.path.which_bin(KNOWN_BINARY_NAMES) or "venv"}
 
 __pillar__ = {}
 
@@ -101,7 +101,7 @@ def create(
         Defaults to ``virtualenv``.
 
     system_site_packages : False
-        Passthrough argument given to virtualenv or pyvenv
+        Passthrough argument given to virtualenv or venv
 
     distribute : False
         Passthrough argument given to virtualenv
@@ -111,7 +111,7 @@ def create(
         ``distribute=True``
 
     clear : False
-        Passthrough argument given to virtualenv or pyvenv
+        Passthrough argument given to virtualenv or venv
 
     python : None (default)
         Passthrough argument given to virtualenv
@@ -126,10 +126,10 @@ def create(
         Passthrough argument given to virtualenv if not None
 
     symlinks : None
-        Passthrough argument given to pyvenv if True
+        Passthrough argument given to venv if True
 
     upgrade : None
-        Passthrough argument given to pyvenv if True
+        Passthrough argument given to venv if True
 
     user : None
         Set ownership for the virtualenv
@@ -174,12 +174,15 @@ def create(
            - VIRTUALENV_ALWAYS_COPY: 1
     """
     if venv_bin is None:
-        venv_bin = __opts__.get("venv_bin") or __pillar__.get("venv_bin")
+        venv_bin = __pillar__.get("venv_bin") or __opts__.get("venv_bin")
 
-    cmd = [venv_bin]
+    if venv_bin == "venv":
+        cmd = [sys.executable, "-m", "venv"]
+    else:
+        cmd = [venv_bin]
 
-    if "pyvenv" not in venv_bin:
-        # ----- Stop the user if pyvenv only options are used --------------->
+    if "venv" not in venv_bin:
+        # ----- Stop the user if venv only options are used ----------------->
         # If any of the following values are not None, it means that the user
         # is actually passing a True or False value. Stop Him!
         if upgrade is not None:
@@ -194,7 +197,7 @@ def create(
                     venv_bin
                 )
             )
-        # <---- Stop the user if pyvenv only options are used ----------------
+        # <---- Stop the user if venv only options are used ------------------
 
         virtualenv_version_info = virtualenv_ver(venv_bin, user=user, **kwargs)
 
@@ -264,7 +267,7 @@ def create(
         if symlinks is True:
             cmd.append("--symlinks")
 
-    # Common options to virtualenv and pyvenv
+    # Common options to virtualenv and venv
     if clear is True:
         cmd.append("--clear")
     if system_site_packages is True:
diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py
index 208381025b9..e3de699d625 100644
--- a/salt/modules/win_file.py
+++ b/salt/modules/win_file.py
@@ -1366,10 +1366,13 @@ def remove(path, force=False):
             # A file and a symlinked file are removed the same way
             path.unlink()
         else:
+            # This is a directory, list its contents and remove them recursively
             for child in path.iterdir():
                 # If it's a normal directory, recurse to remove its contents
                 remove(str(child), force)
-
             # rmdir will work now because the directory is empty
             path.rmdir()
     except OSError as exc:
diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py
index 8ef11b23041..0c098a9f796 100644
--- a/salt/modules/win_lgpo.py
+++ b/salt/modules/win_lgpo.py
@@ -5061,6 +5061,18 @@ def _remove_invalid_xmlns(xml_file):
     return xml_tree
 
 
+def _encode_xmlns_url(match):
+    """
+    Escape spaces in xmlns urls
+    """
+    before_xmlns = match.group(1)
+    xmlns = match.group(2)
+    url = match.group(3)
+    after_url = match.group(4)
+    encoded_url = re.sub(r"\s+", "%20", url)
+    return f'{before_xmlns}{xmlns}="{encoded_url}"{after_url}'
+
+
 def _parse_xml(adm_file):
     """
     Parse the admx/adml file. There are 3 scenarios (so far) that we'll likely
@@ -5107,6 +5119,12 @@ def _parse_xml(adm_file):
                 encoding = "utf-16"
                 raw = raw.decode(encoding)
             for line in raw.split("\r\n"):
+                if 'xmlns="' in line:
+                    line = re.sub(
+                        r'(.*)(\bxmlns(?::\w+)?)\s*=\s*"([^"]+)"(.*)',
+                        _encode_xmlns_url,
+                        line,
+                    )
                 if 'key="' in line:
                     start = line.index('key="')
                     q1 = line[start:].index('"') + start
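
``_encode_xmlns_url`` percent-encodes whitespace inside ``xmlns`` URLs so that ADMX/ADML files with malformed namespace declarations still parse. A small demonstration of the substitution, using an invented URL with an embedded space:

.. code-block:: python

    import re

    def _encode_xmlns_url(match):
        before_xmlns, xmlns, url, after_url = match.groups()
        encoded_url = re.sub(r"\s+", "%20", url)
        return f'{before_xmlns}{xmlns}="{encoded_url}"{after_url}'

    line = '<policyDefinitions xmlns="http://example.com/Group Policy/defs">'
    fixed = re.sub(
        r'(.*)(\bxmlns(?::\w+)?)\s*=\s*"([^"]+)"(.*)', _encode_xmlns_url, line
    )
    assert fixed == '<policyDefinitions xmlns="http://example.com/Group%20Policy/defs">'
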
@@ -5744,8 +5762,9 @@ def _set_netsh_value(profile, section, option, value):
         salt.utils.win_lgpo_netsh.set_logging_settings(
             profile=profile, setting=option, value=value, store="lgpo"
         )
-    log.trace("LGPO: Clearing netsh data for %s profile", profile)
-    __context__["lgpo.netsh_data"].pop(profile)
+    if profile in __context__["lgpo.netsh_data"]:
+        log.trace("LGPO: Clearing netsh data for %s profile", profile)
+        __context__["lgpo.netsh_data"].pop(profile, {})
     return True
 
 
diff --git a/salt/modules/win_status.py b/salt/modules/win_status.py
index 2becc4cb19b..41b59bf11f6 100644
--- a/salt/modules/win_status.py
+++ b/salt/modules/win_status.py
@@ -497,8 +497,16 @@ def _get_connected_ips(port):
 
     for conn in conns:
         if conn.status == psutil.CONN_ESTABLISHED:
-            if conn.laddr.port == port:
-                connected_ips.add(conn.laddr.ip)
+            if conn.raddr.port == port:
+                log.debug(
+                    "%s %s:%s --> %s:%s",
+                    conn.status,
+                    conn.laddr.ip,
+                    conn.laddr.port,
+                    conn.raddr.ip,
+                    conn.raddr.port,
+                )
+                connected_ips.add(conn.raddr.ip)
 
     return connected_ips
 
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index b2b0e94eba7..9b2ec22b420 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -318,7 +318,8 @@ def _get_options(**kwargs):
         ret.append(f"--branch={branch}")
 
     for item in setopt:
-        ret.extend(["--setopt", str(item)])
+        log.info("Adding configuration option '%s'", item)
+        ret.extend([f"--setopt={item}"])
 
     if get_extra_options:
         # sorting here to make order uniform, makes unit testing more reliable
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index 6899f851679..b27dda10929 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -197,6 +197,15 @@ class RemotePillarMixin:
         log.trace("ext_pillar_extra_data = %s", extra_data)
         return extra_data
 
+    def validate_return(self, data):
+        if not isinstance(data, dict):
+            msg = "Got a bad pillar from master, type {}, expecting dict: {}".format(
+                type(data).__name__, data
+            )
+            log.error(msg)
+            # raise an exception! Pillar isn't empty, we can't sync it!
+            raise SaltClientError(msg)
+
 
 class AsyncRemotePillar(RemotePillarMixin):
     """
@@ -276,14 +285,7 @@ class AsyncRemotePillar(RemotePillarMixin):
         except Exception:  # pylint: disable=broad-except
             log.exception("Exception getting pillar:")
             raise SaltClientError("Exception getting pillar.")
-
-        if not isinstance(ret_pillar, dict):
-            msg = "Got a bad pillar from master, type {}, expecting dict: {}".format(
-                type(ret_pillar).__name__, ret_pillar
-            )
-            log.error(msg)
-            # raise an exception! Pillar isn't empty, we can't sync it!
-            raise SaltClientError(msg)
+        self.validate_return(ret_pillar)
         raise tornado.gen.Return(ret_pillar)
 
     def destroy(self):
@@ -374,14 +376,7 @@ class RemotePillar(RemotePillarMixin):
         except Exception:  # pylint: disable=broad-except
             log.exception("Exception getting pillar:")
             raise SaltClientError("Exception getting pillar.")
-
-        if not isinstance(ret_pillar, dict):
-            log.error(
-                "Got a bad pillar from master, type %s, expecting dict: %s",
-                type(ret_pillar).__name__,
-                ret_pillar,
-            )
-            return {}
+        self.validate_return(ret_pillar)
         return ret_pillar
 
     def destroy(self):
diff --git a/salt/renderers/stateconf.py b/salt/renderers/stateconf.py
index d487f2d4e67..cad4dd387a5 100644
--- a/salt/renderers/stateconf.py
+++ b/salt/renderers/stateconf.py
@@ -6,7 +6,7 @@ A flexible renderer that takes a templating engine and a data format
 :platform: all
 """
 
-# See http://docs.saltstack.org/en/latest/ref/renderers/all/salt.renderers.stateconf.html
+# See http://docs.saltproject.io/en/latest/ref/renderers/all/salt.renderers.stateconf.html
 # for a guide to using this module.
 #
 # FIXME: I really need to review and simplify this renderer, it's getting out of hand!
diff --git a/salt/runners/manage.py b/salt/runners/manage.py
index 65b3455f53d..52e54bcdb91 100644
--- a/salt/runners/manage.py
+++ b/salt/runners/manage.py
@@ -623,7 +623,7 @@ def versions():
 
 def bootstrap(
     version="develop",
-    script="https://bootstrap.saltproject.io",
+    script="https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh",
     hosts="",
     script_args="",
     roster="flat",
@@ -639,7 +639,7 @@ def bootstrap(
     version : develop
         Git tag of version to install
 
-    script : https://bootstrap.saltproject.io/
+    script : https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh
         URL containing the script to execute
 
     hosts
@@ -699,8 +699,8 @@ def bootstrap(
     .. code-block:: bash
 
         salt-run manage.bootstrap hosts='host1,host2'
-        salt-run manage.bootstrap hosts='host1,host2' version='v3004.2'
-        salt-run manage.bootstrap hosts='host1,host2' version='v3004.2' script='https://bootstrap.saltproject.io/develop'
+        salt-run manage.bootstrap hosts='host1,host2' version='v3006.2'
+        salt-run manage.bootstrap hosts='host1,host2' version='v3006.2' script='https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.sh'
     """
 
     client_opts = __opts__.copy()
@@ -769,7 +769,7 @@ def bootstrap_psexec(
 
     installer_url
         URL of minion installer executable. Defaults to the latest version from
-        https://repo.saltproject.io/windows/
+        https://packages.broadcom.com/artifactory/saltproject-generic/windows/
 
     username
         Optional user name for login on remote computer.
@@ -787,6 +787,9 @@ def bootstrap_psexec(
         salt-run manage.bootstrap_psexec hosts='host1,host2' installer_url='http://exampledomain/salt-installer.exe'
     """
 
+    # TODO: Need to make this get the latest version from the new repo location
+    # TODO: Similar to tests/support/win_installer.py
+    # TODO: Maybe need to move that ^^^^ to a salt util
     if not installer_url:
         base_url = "https://repo.saltproject.io/windows/"
         source = urllib.request.urlopen(base_url).read()
diff --git a/salt/scripts.py b/salt/scripts.py
index 662104a7142..4e0faff3004 100644
--- a/salt/scripts.py
+++ b/salt/scripts.py
@@ -162,9 +162,12 @@ def salt_minion():
     """
     import signal
 
+    import salt.utils.debug
     import salt.utils.platform
     import salt.utils.process
 
+    salt.utils.debug.enable_sigusr1_handler()
+
     salt.utils.process.notify_systemd()
 
     import multiprocessing
diff --git a/salt/state.py b/salt/state.py
index 9865e602597..0c7c7141d50 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -20,6 +20,7 @@ import importlib
 import inspect
 import logging
 import os
+import pickle
 import random
 import re
 import site
@@ -45,6 +46,7 @@ import salt.utils.event
 import salt.utils.files
 import salt.utils.hashutils
 import salt.utils.immutabletypes as immutabletypes
+import salt.utils.jid
 import salt.utils.msgpack
 import salt.utils.platform
 import salt.utils.process
@@ -57,7 +59,7 @@ from salt.exceptions import CommandExecutionError, SaltRenderError, SaltReqTimeo
 from salt.serializers.msgpack import deserialize as msgpack_deserialize
 from salt.serializers.msgpack import serialize as msgpack_serialize
 from salt.template import compile_template, compile_template_str
-from salt.utils.odict import DefaultOrderedDict, OrderedDict
+from salt.utils.odict import DefaultOrderedDict, HashableOrderedDict
 from salt.utils.requisite import DependencyGraph, RequisiteType
 
 log = logging.getLogger(__name__)
@@ -131,11 +133,6 @@ STATE_INTERNAL_KEYWORDS = STATE_REQUISITE_KEYWORDS.union(
 ).union(STATE_RUNTIME_KEYWORDS)
 
 
-class HashableOrderedDict(OrderedDict):
-    def __hash__(self) -> int:
-        return id(self)
-
-
 def split_low_tag(tag: str) -> dict[str, Any]:
     """
     Take a low tag and split it back into the low dict that it came from
@@ -725,7 +722,21 @@ class State:
         loader="states",
         initial_pillar=None,
         file_client=None,
+        _invocation_id=None,
     ):
+        """
+        When instantiating an object of this class, do not pass
+        ``_invocation_id``. It is an internal field for tracking
+        parallel executions where no jid is available (Salt-SSH) and
+        only exposed as an init argument to work on spawning platforms.
+        """
+        if jid is not None:
+            _invocation_id = jid
+        if _invocation_id is None:
+            # For salt-ssh parallel states, we need a unique identifier
+            # for a single execution. self.jid should not be set there
+            # since it's used for other purposes as well.
+            _invocation_id = salt.utils.jid.gen_jid(opts)
         self._init_kwargs = {
             "opts": opts,
             "pillar_override": pillar_override,
@@ -736,6 +747,7 @@ class State:
             "mocked": mocked,
             "loader": loader,
             "initial_pillar": initial_pillar,
+            "_invocation_id": _invocation_id,
         }
         self.states_loader = loader
         if "grains" not in opts:
@@ -781,6 +793,7 @@ class State:
         self.pre = {}
         self.__run_num = 0
         self.jid = jid
+        self.invocation_id = _invocation_id
         self.instance_id = str(id(self))
         self.inject_globals = {}
         self.mocked = mocked
@@ -1892,12 +1905,15 @@ class State:
         return req_in_high, errors
 
     @classmethod
-    def _call_parallel_target(cls, instance, init_kwargs, name, cdata, low):
+    def _call_parallel_target(
+        cls, instance, init_kwargs, name, cdata, low, inject_globals
+    ):
         """
         The target function to call that will create the parallel thread/process
         """
         if instance is None:
             instance = cls(**init_kwargs)
+            instance.states.inject_globals = inject_globals
         # we need to re-record start/end duration here because it is impossible to
         # correctly calculate further down the chain
         utc_start_time = datetime.datetime.utcnow()
@@ -1990,7 +2006,7 @@ class State:
                     ]
                 )
 
-        troot = os.path.join(instance.opts["cachedir"], instance.jid)
+        troot = os.path.join(instance.opts["cachedir"], instance.invocation_id)
         tfile = os.path.join(troot, salt.utils.hashutils.sha1_digest(tag))
         if not os.path.isdir(troot):
             try:
@@ -2002,7 +2018,12 @@ class State:
         with salt.utils.files.fopen(tfile, "wb+") as fp_:
             fp_.write(msgpack_serialize(ret))
 
-    def call_parallel(self, cdata: dict[str, Any], low: LowChunk):
+    def call_parallel(
+        self,
+        cdata: dict[str, Any],
+        low: LowChunk,
+        inject_globals: Optional[dict[Any, Any]],
+    ):
         """
         Call the state defined in the given cdata in parallel
         """
@@ -2019,13 +2040,37 @@ class State:
             instance = None
         else:
             instance = self
+            inject_globals = None
 
         proc = salt.utils.process.Process(
             target=self._call_parallel_target,
-            args=(instance, self._init_kwargs, name, cdata, low),
+            args=(instance, self._init_kwargs, name, cdata, low, inject_globals),
             name=f"ParallelState({name})",
         )
-        proc.start()
+        try:
+            proc.start()
+        except TypeError as err:
+            # Some modules use the context to cache unpicklable objects like
+            # database connections or loader instances.
+            # Ensure we don't crash because of that on spawning platforms.
+            if "cannot pickle" not in str(err):
+                raise
+            clean_context = {}
+            for var, val in self._init_kwargs["context"].items():
+                try:
+                    pickle.dumps(val)
+                except TypeError:
+                    pass
+                else:
+                    clean_context[var] = val
+            init_kwargs = self._init_kwargs.copy()
+            init_kwargs["context"] = clean_context
+            proc = salt.utils.process.Process(
+                target=self._call_parallel_target,
+                args=(instance, init_kwargs, name, cdata, low, inject_globals),
+                name=f"ParallelState({name})",
+            )
+            proc.start()
         ret = {
             "name": name,
             "result": None,
@@ -2177,7 +2222,7 @@ class State:
                         )
                     elif not low.get("__prereq__") and low.get("parallel"):
                         # run the state call in parallel, but only if not in a prereq
-                        ret = self.call_parallel(cdata, low)
+                        ret = self.call_parallel(cdata, low, inject_globals)
                     else:
                         self.format_slots(cdata)
                         with salt.utils.files.set_umask(low.get("__umask__")):
@@ -2567,7 +2612,7 @@ class State:
                 if not proc.is_alive():
                     ret_cache = os.path.join(
                         self.opts["cachedir"],
-                        self.jid,
+                        self.invocation_id,
                         salt.utils.hashutils.sha1_digest(tag),
                     )
                     if not os.path.isfile(ret_cache):
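
The pickling fallback in ``call_parallel`` above matters on spawning platforms (Windows, macOS), where every process argument must pickle: unpicklable values cached in the context are silently dropped before retrying. A standalone sketch of the filtering step, with a deliberately unpicklable value (the real code catches only ``TypeError``; the sketch casts a slightly wider net):

.. code-block:: python

    import pickle

    context = {
        "cached_ttl": 300,
        "db_conn": (lambda: None),   # lambdas don't pickle: stands in for a
    }                                # live handle cached in the context

    clean_context = {}
    for var, val in context.items():
        try:
            pickle.dumps(val)
        except (TypeError, AttributeError, pickle.PicklingError):
            continue
        clean_context[var] = val

    assert clean_context == {"cached_ttl": 300}
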
diff --git a/salt/states/file.py b/salt/states/file.py
index 10b16cd5317..e1e2a6f39da 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -282,6 +282,7 @@ import difflib
 import itertools
 import logging
 import os
+import pathlib
 import posixpath
 import re
 import shutil
@@ -557,7 +558,26 @@ def _gen_recurse_managed_files(
             managed_directories.add(mdest)
             keep.add(mdest)
 
-    return managed_files, managed_directories, managed_symlinks, keep
+    # Sets are randomly ordered. We need to use a list so we can make sure
+    # symlinks are always at the end. This is necessary because the file must
+    # exist before we can create a symlink to it. See issue:
+    # https://github.com/saltstack/salt/issues/64630
+    new_managed_files = list(managed_files)
+    # Now let's move all the symlinks to the end
+    for link_src_relpath, _ in managed_symlinks:
+        for file_dest, file_src in managed_files:
+            # We need to convert relpath to fullpath. We're using pathlib to
+            # be platform-agnostic
+            symlink_full_path = pathlib.Path(f"{name}{os.sep}{link_src_relpath}")
+            file_dest_full_path = pathlib.Path(file_dest)
+            if symlink_full_path == file_dest_full_path:
+                new_managed_files.append(
+                    new_managed_files.pop(
+                        new_managed_files.index((file_dest, file_src))
+                    )
+                )
+
+    return new_managed_files, managed_directories, managed_symlinks, keep
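
The reordering above can be shown in isolation: any managed file whose destination coincides with a symlink's source path is popped and re-appended, so it is processed only after the files it may point at exist. A toy version with illustrative paths:

.. code-block:: python

    import pathlib

    name = "/srv/out"
    managed_files = [
        ("/srv/out/current", "salt://current"),
        ("/srv/out/conf/app.conf", "salt://app.conf"),
    ]
    managed_symlinks = {("current", "conf/app.conf")}  # (link path, target)

    ordered = list(managed_files)
    for link_src_relpath, _ in managed_symlinks:
        for entry in managed_files:
            if pathlib.Path(f"{name}/{link_src_relpath}") == pathlib.Path(entry[0]):
                ordered.append(ordered.pop(ordered.index(entry)))

    assert ordered[-1] == ("/srv/out/current", "salt://current")
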
 
 
 def _gen_keep_files(name, require, walk_d=None):
@@ -4645,18 +4665,26 @@ def recurse(
                                 or immediate subdirectories
 
     keep_symlinks
-        Keep symlinks when copying from the source. This option will cause
-        the copy operation to terminate at the symlink. If desire behavior
-        similar to rsync, then set this to True. This option is not taken
-        in account if ``fileserver_followsymlinks`` is set to False.
+        Determines how symbolic links (symlinks) are handled during the copying
+        process. When set to ``True``, the copy operation will copy the symlink
+        itself, rather than the file or directory it points to. When set to
+        ``False``, the operation will follow the symlink and copy the target
+        file or directory. If you want behavior similar to rsync, set this
+        option to ``True``.
+
+        However, if the ``fileserver_followsymlinks`` option is set to ``False``,
+        the ``keep_symlinks`` setting will be ignored, and symlinks will not be
+        copied at all.
 
     force_symlinks
-        Force symlink creation. This option will force the symlink creation.
-        If a file or directory is obstructing symlink creation it will be
-        recursively removed so that symlink creation can proceed. This
-        option is usually not needed except in special circumstances. This
-        option is not taken in account if ``fileserver_followsymlinks`` is
-        set to False.
+        Controls the creation of symlinks when using ``keep_symlinks``. When set
+        to ``True``, it forces the creation of symlinks by removing any existing
+        files or directories that might be obstructing their creation. This
+        removal is done recursively if a directory is blocking the symlink. This
+        option is only used when ``keep_symlinks`` is passed and is ignored if
+        ``fileserver_followsymlinks`` is set to ``False``.
 
     win_owner
         The owner of the symlink and directories if ``makedirs`` is True. If
@@ -8877,7 +8905,7 @@ def decode(
             - name: /tmp/new_file
             - encoding_type: base64
             - encoded_data: |
-                {{ salt.pillar.get('path:to:data') | indent(8) }}
+                {{ salt['pillar.get']('path:to:data') | indent(8) }}
     """
     ret = {"name": name, "changes": {}, "result": False, "comment": ""}
 
diff --git a/salt/states/ini_manage.py b/salt/states/ini_manage.py
index 9851d792734..eec3ab843f6 100644
--- a/salt/states/ini_manage.py
+++ b/salt/states/ini_manage.py
@@ -21,8 +21,61 @@ def __virtual__():
     return __virtualname__ if "ini.set_option" in __salt__ else False
 
 
-def options_present(name, sections=None, separator="=", strict=False):
+def options_present(
+    name, sections=None, separator="=", strict=False, encoding=None, no_spaces=False
+):
     """
+    Set or create a key/value pair in an ``ini`` file. Options present in the
+    ini file and not specified in the sections dict will be untouched, unless
+    the ``strict: True`` flag is used.
+
+    Sections that do not exist will be created.
+
+    Args:
+
+        name (str):
+            The path to the ini file
+
+        sections (dict):
+            A dictionary of sections and key/value pairs that will be used to
+            update the ini file. Other sections and key/value pairs in the ini
+            file will be untouched unless ``strict: True`` is passed.
+
+        separator (str):
+            The character used to separate keys and values. Standard ini files
+            use the "=" character. The default is ``=``.
+
+        strict (bool):
+            A boolean value that specifies that the ``sections`` dictionary
+            contains all settings in the ini file. ``True`` will create an ini
+            file with only the values specified in ``sections``. ``False`` will
+            append or update values in an existing ini file and leave the rest
+            untouched.
+
+        encoding (str):
+            A string value representing the encoding of the target ini file.
+            If ``None`` is passed, the system default is used, which is likely
+            ``utf-8``. Default is ``None``.
+
+            .. versionadded:: 3006.10
+
+        no_spaces (bool):
+            A bool value that specifies whether the key/value separator should
+            be written without surrounding spaces, i.e. ``key=value`` instead
+            of ``key = value``. Default is ``False``, which wraps the separator
+            with spaces and maintains backwards compatibility.
+
+            .. warning::
+                This will affect all key/value pairs in the ini file, not just
+                the specific value being set.
+
+            .. versionadded:: 3006.10
+
+    Returns:
+        dict: A dictionary containing a list of the changes made
+
+    Example:
+
     .. code-block:: yaml
 
         /home/saltminion/api-paste.ini:
@@ -35,12 +88,6 @@ def options_present(name, sections=None, separator="=", strict=False):
                   secondoption: 'secondvalue'
                 test1:
                   testkey1: 'testval121'
-
-    options present in file and not specified in sections
-    dict will be untouched, unless `strict: True` flag is
-    used
-
-    changes dict will contain the list of changes made
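+
+    A hypothetical example using the new ``encoding`` and ``no_spaces``
+    arguments (the path and values shown are illustrative):
+
+    .. code-block:: yaml
+
+        /etc/myapp/config.ini:
+          ini.options_present:
+            - separator: '='
+            - encoding: utf-16
+            - no_spaces: True
+            - sections:
+                general:
+                  setting1: value1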
     """
     ret = {
         "name": name,
@@ -58,7 +105,9 @@ def options_present(name, sections=None, separator="=", strict=False):
             for sname, sbody in sections.items():
                 if not isinstance(sbody, (dict, OrderedDict)):
                     options.update({sname: sbody})
-            cur_ini = __salt__["ini.get_ini"](name, separator)
+            cur_ini = __salt__["ini.get_ini"](
+                file_name=name, separator=separator, encoding=encoding
+            )
             original_top_level_opts = {}
             original_sections = {}
             for key, val in cur_ini.items():
@@ -78,7 +127,13 @@ def options_present(name, sections=None, separator="=", strict=False):
                         ret["comment"] += f"Changed key {option}.\n"
                         ret["result"] = None
             else:
-                options_updated = __salt__["ini.set_option"](name, options, separator)
+                options_updated = __salt__["ini.set_option"](
+                    file_name=name,
+                    sections=options,
+                    separator=separator,
+                    encoding=encoding,
+                    no_spaces=no_spaces,
+                )
                 changes.update(options_updated)
             if strict:
                 for opt_to_remove in set(original_top_level_opts).difference(options):
@@ -87,7 +142,11 @@ def options_present(name, sections=None, separator="=", strict=False):
                         ret["result"] = None
                     else:
                         __salt__["ini.remove_option"](
-                            name, None, opt_to_remove, separator
+                            file_name=name,
+                            section=None,
+                            option=opt_to_remove,
+                            separator=separator,
+                            encoding=encoding,
                         )
                         changes.update(
                             {
@@ -119,7 +178,11 @@ def options_present(name, sections=None, separator="=", strict=False):
                             ret["result"] = None
                         else:
                             __salt__["ini.remove_option"](
-                                name, section_name, key_to_remove, separator
+                                file_name=name,
+                                section=section_name,
+                                option=key_to_remove,
+                                separator=separator,
+                                encoding=encoding,
                             )
                             changes[section_name].update({key_to_remove: ""})
                             changes[section_name].update(
@@ -140,7 +203,11 @@ def options_present(name, sections=None, separator="=", strict=False):
                             ret["result"] = None
                 else:
                     options_updated = __salt__["ini.set_option"](
-                        name, {section_name: section_body}, separator
+                        file_name=name,
+                        sections={section_name: section_body},
+                        separator=separator,
+                        encoding=encoding,
+                        no_spaces=no_spaces,
                     )
                     if options_updated:
                         changes[section_name].update(options_updated[section_name])
@@ -148,7 +215,13 @@ def options_present(name, sections=None, separator="=", strict=False):
                         del changes[section_name]
         else:
             if not __opts__["test"]:
-                changes = __salt__["ini.set_option"](name, sections, separator)
+                changes = __salt__["ini.set_option"](
+                    file_name=name,
+                    sections=sections,
+                    separator=separator,
+                    encoding=encoding,
+                    no_spaces=no_spaces,
+                )
     except (OSError, KeyError) as err:
         ret["comment"] = f"{err}"
         ret["result"] = False
@@ -165,8 +238,37 @@ def options_present(name, sections=None, separator="=", strict=False):
     return ret
 
 
-def options_absent(name, sections=None, separator="="):
+def options_absent(name, sections=None, separator="=", encoding=None):
     """
+    Remove a key/value pair from an ini file. Key/value pairs present in the ini
+    file and not specified in the ``sections`` dict will be untouched.
+
+    Args:
+
+        name (str):
+            The path to the ini file
+
+        sections (dict):
+            A dictionary of sections and key/value pairs that will be removed
+            from the ini file. Other key/value pairs in the ini file will be
+            untouched.
+
+        separator (str):
+            The character used to separate keys and values. Standard ini files
+            use the "=" character. The default is ``=``.
+
+        encoding (str):
+            A string value representing the encoding of the target ini file.
+            If ``None`` is passed, the system default is used, which is likely
+            ``utf-8``. Default is ``None``.
+
+            .. versionadded:: 3006.10
+
+    Returns:
+        dict: A dictionary containing a list of the changes made
+
+    Example:
+
     .. code-block:: yaml
 
         /home/saltminion/api-paste.ini:
@@ -178,11 +280,6 @@ def options_absent(name, sections=None, separator="="):
                   - secondoption
                 test1:
                   - testkey1
-
-    options present in file and not specified in sections
-    dict will be untouched
-
-    changes dict will contain the list of changes made
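+
+    A hypothetical example removing a key from a non-default-encoded file
+    (the path and encoding shown are illustrative):
+
+    .. code-block:: yaml
+
+        /etc/myapp/config.ini:
+          ini.options_absent:
+            - encoding: utf-16
+            - sections:
+                general:
+                  - setting1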
     """
     ret = {
         "name": name,
@@ -196,7 +293,12 @@ def options_absent(name, sections=None, separator="="):
         for section in sections or {}:
             section_name = " in section " + section if section else ""
             try:
-                cur_section = __salt__["ini.get_section"](name, section, separator)
+                cur_section = __salt__["ini.get_section"](
+                    file_name=name,
+                    section=section,
+                    separator=separator,
+                    encoding=encoding,
+                )
             except OSError as err:
                 ret["comment"] = f"{err}"
                 ret["result"] = False
@@ -215,7 +317,13 @@ def options_absent(name, sections=None, separator="="):
                     ret["result"] = None
             else:
                 option = section
-                if not __salt__["ini.get_option"](name, None, option, separator):
+                if not __salt__["ini.get_option"](
+                    file_name=name,
+                    section=None,
+                    option=option,
+                    separator=separator,
+                    encoding=encoding,
+                ):
                     ret["comment"] += f"Key {option} does not exist.\n"
                     continue
                 ret["comment"] += f"Deleted key {option}.\n"
@@ -229,7 +337,11 @@ def options_absent(name, sections=None, separator="="):
         for key in keys:
             try:
                 current_value = __salt__["ini.remove_option"](
-                    name, section, key, separator
+                    file_name=name,
+                    section=section,
+                    option=key,
+                    separator=separator,
+                    encoding=encoding,
                 )
             except OSError as err:
                 ret["comment"] = f"{err}"
@@ -247,8 +359,38 @@ def options_absent(name, sections=None, separator="="):
     return ret
 
 
-def sections_present(name, sections=None, separator="="):
+def sections_present(name, sections=None, separator="=", encoding=None):
     """
+    Add sections to an ini file. This will only create empty sections. To also
+    create key/value pairs, use the ``options_present`` state.
+
+    Args:
+
+        name (str):
+            The path to the ini file
+
+        sections (dict):
+            A dictionary of sections and key/value pairs that will be used to
+            update the ini file. Only the sections portion is used, key/value
+            pairs are ignored. To also set key/value pairs, use the
+            options_present state.
+
+        separator (str):
+            The character used to separate keys and values. Standard ini files
+            use the "=" character. The default is ``=``.
+
+        encoding (str):
+            A string value representing the encoding of the target ini file.
+            If ``None`` is passed, the system default is used, which is likely
+            ``utf-8``. Default is ``None``.
+
+            .. versionadded:: 3006.10
+
+    Returns:
+        dict: A dictionary containing a list of the changes made
+
+    Example:
+
     .. code-block:: yaml
 
         /home/saltminion/api-paste.ini:
@@ -257,12 +399,6 @@ def sections_present(name, sections=None, separator="="):
             - sections:
                 - section_one
                 - section_two
-
-    This will only create empty sections. To also create options, use
-    options_present state
-
-    options present in file and not specified in sections will be deleted
-    changes dict will contain the sections that changed
     """
     ret = {
         "name": name,
@@ -274,7 +410,9 @@ def sections_present(name, sections=None, separator="="):
         ret["result"] = True
         ret["comment"] = ""
         try:
-            cur_ini = __salt__["ini.get_ini"](name, separator)
+            cur_ini = __salt__["ini.get_ini"](
+                file_name=name, separator=separator, encoding=encoding
+            )
         except OSError as err:
             ret["result"] = False
             ret["comment"] = f"{err}"
@@ -293,7 +431,12 @@ def sections_present(name, sections=None, separator="="):
     for section_name in sections or []:
         section_to_update.update({section_name: {}})
     try:
-        changes = __salt__["ini.set_option"](name, section_to_update, separator)
+        changes = __salt__["ini.set_option"](
+            file_name=name,
+            sections=section_to_update,
+            separator=separator,
+            encoding=encoding,
+        )
     except OSError as err:
         ret["result"] = False
         ret["comment"] = f"{err}"
@@ -307,8 +450,37 @@ def sections_present(name, sections=None, separator="="):
     return ret
 
 
-def sections_absent(name, sections=None, separator="="):
+def sections_absent(name, sections=None, separator="=", encoding=None):
     """
+    Remove sections from the ini file. All key/value pairs in the section will
+    also be removed.
+
+    Args:
+
+        name (str):
+            The path to the ini file
+
+        sections (list):
+            A list of sections to remove from the ini file. All key/value
+            pairs within the removed sections will also be removed. Other
+            sections in the ini file will be untouched.
+
+        separator (str):
+            The character used to separate keys and values. Standard ini files
+            use the "=" character. The default is ``=``.
+
+        encoding (str):
+            A string value representing the encoding of the target ini file.
+            If ``None`` is passed, the system default is used, which is likely
+            ``utf-8``. Default is ``None``.
+
+            .. versionadded:: 3006.10
+
+    Returns:
+        dict: A dictionary containing a list of the changes made
+
+    Example:
+
     .. code-block:: yaml
 
         /home/saltminion/api-paste.ini:
@@ -317,9 +489,6 @@ def sections_absent(name, sections=None, separator="="):
             - sections:
                 - test
                 - test1
-
-    options present in file and not specified in sections will be deleted
-    changes dict will contain the sections that changed
     """
     ret = {
         "name": name,
@@ -331,7 +500,9 @@ def sections_absent(name, sections=None, separator="="):
         ret["result"] = True
         ret["comment"] = ""
         try:
-            cur_ini = __salt__["ini.get_ini"](name, separator)
+            cur_ini = __salt__["ini.get_ini"](
+                file_name=name, separator=separator, encoding=encoding
+            )
         except OSError as err:
             ret["result"] = False
             ret["comment"] = f"{err}"
@@ -347,7 +518,9 @@ def sections_absent(name, sections=None, separator="="):
         return ret
     for section in sections or []:
         try:
-            cur_section = __salt__["ini.remove_section"](name, section, separator)
+            cur_section = __salt__["ini.remove_section"](
+                file_name=name, section=section, separator=separator, encoding=encoding
+            )
         except OSError as err:
             ret["result"] = False
             ret["comment"] = f"{err}"
diff --git a/salt/states/linux_acl.py b/salt/states/linux_acl.py
index 61429b592cf..f6db6e88936 100644
--- a/salt/states/linux_acl.py
+++ b/salt/states/linux_acl.py
@@ -708,9 +708,6 @@ def list_absent(name, acl_type, acl_names=None, recurse=False):
     acl_names
         The list of users or groups
 
-    perms
-        Remove the permissions eg.: rwx
-
     recurse
         Set the permissions recursive in the path
 
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index ff872153d23..117e8797767 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -2964,7 +2964,7 @@ def _uninstall(
 
     try:
         pkg_params = __salt__["pkg_resource.parse_targets"](
-            name, pkgs, normalize=normalize
+            name, pkgs, normalize=normalize, version=version, **kwargs
         )[0]
     except MinionError as exc:
         return {
@@ -3031,7 +3031,7 @@ def _uninstall(
     new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
     failed = []
     for param in pkg_params:
-        if __grains__["os_family"] in ["Suse", "RedHat"]:
+        if __grains__["os_family"] in ["Suse", "RedHat", "Windows"]:
             # Check if the package version set to be removed is actually removed:
             if param in new and not pkg_params[param]:
                 failed.append(param)
diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py
index 07bec19991c..3e689e998a2 100644
--- a/salt/states/pkgrepo.py
+++ b/salt/states/pkgrepo.py
@@ -94,17 +94,17 @@ Using ``aptkey: False`` with ``key_url`` example:
 
 .. code-block:: yaml
 
-    deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://repo.saltproject.io/py3/ubuntu/18.04/amd64/latest bionic main:
+    deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://packages.broadcom.com/artifactory/saltproject-deb/ bionic main:
       pkgrepo.managed:
         - file: /etc/apt/sources.list.d/salt.list
-        - key_url: https://repo.saltproject.io/py3/ubuntu/18.04/amd64/latest/salt-archive-keyring.gpg
+        - key_url: https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public
         - aptkey: False
 
 Using ``aptkey: False`` with ``keyserver`` and ``keyid``:
 
 .. code-block:: yaml
 
-    deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://repo.saltproject.io/py3/ubuntu/18.04/amd64/latest bionic main:
+    deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://packages.broadcom.com/artifactory/saltproject-deb/ bionic main:
       pkgrepo.managed:
         - file: /etc/apt/sources.list.d/salt.list
         - keyserver: keyserver.ubuntu.com
diff --git a/salt/states/virtualenv_mod.py b/salt/states/virtualenv_mod.py
index 7dadfa23fd5..ceb25effb66 100644
--- a/salt/states/virtualenv_mod.py
+++ b/salt/states/virtualenv_mod.py
@@ -137,8 +137,8 @@ def managed(
     Current versions of Salt use onedir packages and will use onedir python
     interpreter by default. If you've installed Salt via our package
     repository, you will likely want to provide the path to the interpreter
-    with wich you would like to be used to create the virtual envrionment. The
-    interperter can be specified by providing the `python` option.
+    to be used when creating the virtual environment. The interpreter can be
+    specified by providing the `python` option.
     """
     ret = {"name": name, "result": True, "comment": "", "changes": {}}
 
diff --git a/salt/transport/base.py b/salt/transport/base.py
index 582d6948531..9fbe0a7717c 100644
--- a/salt/transport/base.py
+++ b/salt/transport/base.py
@@ -211,6 +211,14 @@ def ipc_publish_client(node, opts, io_loop):
 
 
 def ipc_publish_server(node, opts):
+    """
+    Create an IPC publish server.
+
+    With the exception of a master's pub_path, all IPC paths are created with
+    user read/write permissions only. On a master the IPC publish server's
+    pub_path permissions are also group read/write. This is done so that
+    non-root users running the salt CLI can execute jobs on a master.
+    """
     # Default to TCP for now
     kwargs = {"transport": "tcp", "ssl": None}
     if opts["ipc_mode"] == "tcp":
@@ -233,6 +241,7 @@ def ipc_publish_server(node, opts):
             kwargs.update(
                 pub_path=os.path.join(opts["sock_dir"], "master_event_pub.ipc"),
                 pull_path=os.path.join(opts["sock_dir"], "master_event_pull.ipc"),
+                pub_path_perms=0o660,
             )
         else:
             id_hash = _minion_hash(
diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py
index 3b8bba58195..57ae753d408 100644
--- a/salt/transport/tcp.py
+++ b/salt/transport/tcp.py
@@ -309,13 +309,13 @@ class PublishClient(salt.transport.base.PublishClient):
                     _connect_to = self.path
                 else:
                     _connect_to = f"{self.host}:{self.port}"
-                log.warning(
-                    "TCP Publish Client encountered an exception while connecting to"
-                    " %s: %r, will reconnect in %d seconds - %s",
+                log.debug(
+                    "%s encountered an exception while connecting to"
+                    " %s: %r, will reconnect in %d seconds",
+                    self,
                     _connect_to,
                     exc,
                     self.backoff,
-                    self._trace,
                 )
                 if timeout and time.monotonic() - start > timeout:
                     break
@@ -428,18 +428,22 @@ class PublishClient(salt.transport.base.PublishClient):
         while not self._stream:
             # Retry quickly, we may want to increase this if it's hogging cpu.
             await asyncio.sleep(0.003)
+        tasks = []
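+        # Keep a reference to each callback task so it is not garbage
+        # collected while still running; finished tasks are reaped below.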
         while True:
             msg = await self.recv()
             if msg:
                 try:
                     # XXX This is handled better in the websocket transport work
-                    await callback(msg)
+                    tasks.append(asyncio.create_task(callback(msg)))
                 except Exception as exc:  # pylint: disable=broad-except
                     log.error(
                         "Unhandled exception while running callback %r",
                         self,
                         exc_info=True,
                     )
+            for task in tasks[:]:
+                if task.done():
+                    tasks.remove(task)
 
     def on_recv(self, callback):
         """
@@ -1149,7 +1153,13 @@ class TCPPuller:
     """
 
     def __init__(
-        self, host=None, port=None, path=None, io_loop=None, payload_handler=None
+        self,
+        host=None,
+        port=None,
+        path=None,
+        mode=0o600,
+        io_loop=None,
+        payload_handler=None,
     ):
         """
         Create a new Tornado IPC server
@@ -1169,6 +1179,7 @@ class TCPPuller:
         self.host = host
         self.port = port
         self.path = path
+        self.mode = mode
         self._started = False
         self.payload_handler = payload_handler
 
@@ -1186,7 +1197,7 @@ class TCPPuller:
         # Start up the ioloop
         if self.path:
             log.trace("IPCServer: binding to socket: %s", self.path)
-            self.sock = tornado.netutil.bind_unix_socket(self.path)
+            self.sock = tornado.netutil.bind_unix_socket(self.path, self.mode)
         else:
             log.trace("IPCServer: binding to socket: %s:%s", self.host, self.port)
             self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -1327,7 +1338,10 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         pull_host=None,
         pull_port=None,
         pull_path=None,
+        pull_path_perms=0o600,
+        pub_path_perms=0o600,
         ssl=None,
+        started=None,
     ):
         self.opts = opts
         self.pub_sock = None
@@ -1337,7 +1351,13 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         self.pull_host = pull_host
         self.pull_port = pull_port
         self.pull_path = pull_path
+        self.pull_path_perms = pull_path_perms
+        self.pub_path_perms = pub_path_perms
         self.ssl = ssl
+        if started is None:
+            self.started = multiprocessing.Event()
+        else:
+            self.started = started
 
     @property
     def topic_support(self):
@@ -1355,6 +1375,10 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             "pull_host": self.pull_host,
             "pull_port": self.pull_port,
             "pull_path": self.pull_path,
+            "pub_path_perms": self.pub_path_perms,
+            "pull_path_perms": self.pull_path_perms,
+            "ssl": self.ssl,
+            "started": self.started,
         }
 
     def publish_daemon(
@@ -1406,7 +1430,10 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             log.debug(
                 "Publish server binding pub to %s ssl=%r", self.pub_path, self.ssl
             )
-            sock = tornado.netutil.bind_unix_socket(self.pub_path)
+            with salt.utils.files.set_umask(0o177):
+                sock = tornado.netutil.bind_unix_socket(
+                    self.pub_path, self.pub_path_perms
+                )
         else:
             log.debug(
                 "Publish server binding pub to %s:%s ssl=%r",
@@ -1435,17 +1462,18 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             pull_host = self.pull_host
             pull_port = self.pull_port
 
-        self.pull_sock = TCPPuller(
-            host=self.pull_host,
-            port=self.pull_port,
-            path=self.pull_path,
-            io_loop=io_loop,
-            payload_handler=publish_payload,
-        )
-
-        # Securely create socket
         with salt.utils.files.set_umask(0o177):
+            self.pull_sock = TCPPuller(
+                host=self.pull_host,
+                port=self.pull_port,
+                path=self.pull_path,
+                mode=self.pull_path_perms,
+                io_loop=io_loop,
+                payload_handler=publish_payload,
+            )
+            # Securely create socket
             self.pull_sock.start()
+        self.started.set()
 
     def pre_fork(self, process_manager):
         """
@@ -1701,10 +1729,7 @@ class RequestClient(salt.transport.base.RequestClient):
         self._tcp_client = TCPClientKeepAlive(opts)
         self.source_ip = opts.get("source_ip")
         self.source_port = opts.get("source_ret_port")
-        self._mid = 1
-        self._max_messages = int((1 << 31) - 2)  # number of IDs before we wrap
         # TODO: max queue size
-        self.send_queue = []  # queue of messages to be sent
         self.send_future_map = {}  # mapping of request_id -> Future
 
         self._read_until_future = None
@@ -1826,18 +1851,7 @@ class RequestClient(salt.transport.base.RequestClient):
         self._stream_return_running = False
 
     def _message_id(self):
-        wrap = False
-        while self._mid in self.send_future_map:
-            if self._mid >= self._max_messages:
-                if wrap:
-                    # this shouldn't ever happen, but just in case
-                    raise Exception("Unable to find available messageid")
-                self._mid = 1
-                wrap = True
-            else:
-                self._mid += 1
-
-        return self._mid
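+        # A random UUID4 is effectively collision-free, so there is no longer
+        # a need to track and wrap an integer message-id counter.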
+        return str(uuid.uuid4())
 
     def timeout_message(self, message_id, msg):
         if message_id not in self.send_future_map:
diff --git a/salt/transport/ws.py b/salt/transport/ws.py
index 8a842e18d29..5ab507b453d 100644
--- a/salt/transport/ws.py
+++ b/salt/transport/ws.py
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 import multiprocessing
+import os
 import socket
 import time
 import warnings
@@ -259,7 +260,10 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         pull_host=None,
         pull_port=None,
         pull_path=None,
+        pull_path_perms=0o600,
+        pub_path_perms=0o600,
         ssl=None,
+        started=None,
     ):
         self.opts = opts
         self.pub_host = pub_host
@@ -268,12 +272,18 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         self.pull_host = pull_host
         self.pull_port = pull_port
         self.pull_path = pull_path
+        self.pull_path_perms = pull_path_perms
+        self.pub_path_perms = pub_path_perms
         self.ssl = ssl
         self.clients = set()
         self._run = None
         self.pub_writer = None
         self.pub_reader = None
         self._connecting = None
+        if started is None:
+            self.started = multiprocessing.Event()
+        else:
+            self.started = started
 
     @property
     def topic_support(self):
@@ -291,6 +301,10 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             "pull_host": self.pull_host,
             "pull_port": self.pull_port,
             "pull_path": self.pull_path,
+            "pull_path_perms": self.pull_path_perms,
+            "pub_path_perms": self.pub_path_perms,
+            "ssl": self.ssl,
+            "started": self.started,
         }
 
     def publish_daemon(
@@ -338,8 +352,11 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             server = aiohttp.web.Server(self.handle_request)
             runner = aiohttp.web.ServerRunner(server)
             await runner.setup()
-            site = aiohttp.web.UnixSite(runner, self.pub_path, ssl_context=ctx)
-            log.info("Publisher binding to socket %s", self.pub_path)
+            with salt.utils.files.set_umask(0o177):
+                log.info("Publisher binding to socket %s", self.pub_path)
+                site = aiohttp.web.UnixSite(runner, self.pub_path, ssl_context=ctx)
+                await site.start()
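+                # The socket file only exists once the site has started, so
+                # relax the restrictive umask-derived mode after creation.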
+                os.chmod(self.pub_path, self.pub_path_perms)
         else:
             sock = _get_socket(self.opts)
             sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -352,7 +369,7 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             await runner.setup()
             site = aiohttp.web.SockSite(runner, sock, ssl_context=ctx)
             log.info("Publisher binding to socket %s:%s", self.pub_host, self.pub_port)
-        await site.start()
+            await site.start()
 
         self._pub_payload = publish_payload
         if self.pull_path:
@@ -360,10 +377,12 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
                 self.puller = await asyncio.start_unix_server(
                     self.pull_handler, self.pull_path
                 )
+                os.chmod(self.pull_path, self.pull_path_perms)
         else:
             self.puller = await asyncio.start_server(
                 self.pull_handler, self.pull_host, self.pull_port
             )
+        self.started.set()
         while self._run.is_set():
             await asyncio.sleep(0.3)
         await self.server.stop()
diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py
index 478057232fa..3a0b225d378 100644
--- a/salt/transport/zeromq.py
+++ b/salt/transport/zeromq.py
@@ -7,6 +7,7 @@ import asyncio.exceptions
 import errno
 import hashlib
 import logging
+import multiprocessing
 import os
 import signal
 import sys
@@ -776,7 +777,7 @@ class ZeroMQSocketMonitor:
     async def consume(self):
         while self._running.is_set():
             try:
-                if self._monitor_socket.poll():
+                if await self._monitor_socket.poll():
                     msg = await self._monitor_socket.recv_multipart()
                     self.monitor_callback(msg)
                 else:
@@ -852,6 +853,9 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         pull_host=None,
         pull_port=None,
         pull_path=None,
+        pull_path_perms=0o600,
+        pub_path_perms=0o600,
+        started=None,
     ):
         self.opts = opts
         self.pub_host = pub_host
@@ -864,6 +868,8 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         self.pull_host = pull_host
         self.pull_port = pull_port
         self.pull_path = pull_path
+        self.pub_path_perms = pub_path_perms
+        self.pull_path_perms = pull_path_perms
         if pull_path:
             self.pull_uri = f"ipc://{pull_path}"
         else:
@@ -874,10 +880,31 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         self.daemon_pub_sock = None
         self.daemon_pull_sock = None
         self.daemon_monitor = None
+        if started is None:
+            self.started = multiprocessing.Event()
+        else:
+            self.started = started
 
     def __repr__(self):
         return f"<PublishServer pub_uri={self.pub_uri} pull_uri={self.pull_uri} at {hex(id(self))}>"
 
+    def __setstate__(self, state):
+        self.__init__(**state)
+
+    def __getstate__(self):
+        return {
+            "opts": self.opts,
+            "pub_host": self.pub_host,
+            "pub_port": self.pub_port,
+            "pub_path": self.pub_path,
+            "pull_host": self.pull_host,
+            "pull_port": self.pull_port,
+            "pull_path": self.pull_path,
+            "pub_path_perms": self.pub_path_perms,
+            "pull_path_perms": self.pull_path_perms,
+            "started": self.started,
+        }
+
     def publish_daemon(
         self,
         publish_payload,
@@ -930,14 +957,14 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             if self.pub_path:
                 os.chmod(  # nosec
                     self.pub_path,
-                    0o600,
+                    self.pub_path_perms,
                 )
             log.info("Starting the Salt Puller on %s", self.pull_uri)
             pull_sock.bind(self.pull_uri)
             if self.pull_path:
                 os.chmod(  # nosec
                     self.pull_path,
-                    0o600,
+                    self.pull_path_perms,
                 )
         return pull_sock, pub_sock, monitor
 
@@ -950,6 +977,7 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             self.daemon_pub_sock,
             self.daemon_monitor,
         ) = self._get_sockets(self.daemon_context, ioloop)
+        self.started.set()
         while True:
             try:
                 package = await self.daemon_pull_sock.recv()
diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py
index 316fc5c478d..2db45ec49bc 100644
--- a/salt/utils/asynchronous.py
+++ b/salt/utils/asynchronous.py
@@ -73,7 +73,8 @@ class SyncWrapper:
         self.cls = cls
         if loop_kwarg:
             kwargs[self.loop_kwarg] = self.io_loop
-        self.obj = cls(*args, **kwargs)
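+        # Instantiate the wrapped class while this wrapper's io_loop is the
+        # current loop, so anything captured during __init__ uses our loop.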
+        with current_ioloop(self.io_loop):
+            self.obj = cls(*args, **kwargs)
         self._async_methods = list(
             set(async_methods + getattr(self.obj, "async_methods", []))
         )
diff --git a/salt/utils/atomicfile.py b/salt/utils/atomicfile.py
index 5dfffbb83c2..a3bf2346804 100644
--- a/salt/utils/atomicfile.py
+++ b/salt/utils/atomicfile.py
@@ -11,6 +11,7 @@ import sys
 import tempfile
 import time
 
+import salt.utils.files
 import salt.utils.win_dacl
 
 CAN_RENAME_OPEN_FILE = False
@@ -128,15 +129,19 @@ class _AtomicWFile:
         if self._fh.closed:
             return
         self._fh.close()
-        if os.path.isfile(self._filename):
-            if salt.utils.win_dacl.HAS_WIN32:
+        if salt.utils.win_dacl.HAS_WIN32:
+            if os.path.isfile(self._filename):
                 salt.utils.win_dacl.copy_security(
                     source=self._filename, target=self._tmp_filename
                 )
-            else:
+        else:
+            if os.path.isfile(self._filename):
                 shutil.copymode(self._filename, self._tmp_filename)
                 st = os.stat(self._filename)
                 os.chown(self._tmp_filename, st.st_uid, st.st_gid)
+            else:
+                # chmod file to default mode based on umask
+                os.chmod(self._tmp_filename, 0o666 & ~salt.utils.files.get_umask())
         atomic_rename(self._tmp_filename, self._filename)
 
     def __exit__(self, exc_type, exc_value, traceback):
diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py
index 2217795833b..749b5000662 100644
--- a/salt/utils/cloud.py
+++ b/salt/utils/cloud.py
@@ -2964,7 +2964,10 @@ def update_bootstrap(config, url=None):
         - The absolute path to the bootstrap
         - The content of the bootstrap script
     """
-    default_url = config.get("bootstrap_script_url", "https://bootstrap.saltstack.com")
+    default_url = config.get(
+        "bootstrap_script_url",
+        "https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh",
+    )
     if not url:
         url = default_url
     if not url:
diff --git a/salt/utils/event.py b/salt/utils/event.py
index f99f83a0bf5..f6ccac8ed02 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -76,7 +76,7 @@ import salt.utils.platform
 import salt.utils.process
 import salt.utils.stringutils
 import salt.utils.zeromq
-from salt.exceptions import SaltDeserializationError
+from salt.exceptions import SaltDeserializationError, SaltInvocationError
 from salt.utils.versions import warn_until
 
 log = logging.getLogger(__name__)
@@ -563,6 +563,9 @@ class SaltEvent:
             try:
                 if not self.cpub and not self.connect_pub(timeout=wait):
                     break
+                if not self._run_io_loop_sync:
+                    log.error("Trying to get event with async subscriber")
+                    raise SaltInvocationError("get_event needs synchronous subscriber")
                 raw = self.subscriber.recv(timeout=wait)
                 if raw is None:
                     break
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index 9feda7a1677..46b127bfcf7 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -487,11 +487,15 @@ class GitProvider:
             ).replace(
                 "/", "_"
             )  # replace "/" with "_" to not cause trouble with file system
+
         self._cache_hash = salt.utils.path.join(cache_root, self._cache_basehash)
         self._cache_basename = "_"
         if self.id.startswith("__env__"):
             try:
-                self._cache_basename = self.get_checkout_target()
+                self._cache_basename = self.get_checkout_target().replace(
+                    "/", "-"
+                )  # replace '/' with '-' to avoid trouble with the filesystem
+
             except AttributeError:
                 log.critical(
                     "__env__ cant generate basename: %s %s", self.role, self.id
@@ -529,7 +533,6 @@ class GitProvider:
         if HAS_PSUTIL:
             cur_pid = os.getpid()
             process = psutil.Process(cur_pid)
-            dgm_process_dir = dir(process)
             cache_dir = self.opts.get("cachedir", None)
             gitfs_active = self.opts.get("gitfs_remotes", None)
             if cache_dir and gitfs_active:
@@ -1567,12 +1570,14 @@ class GitPython(GitProvider):
         local copy was already up-to-date, return False.
         """
         origin = self.repo.remotes[0]
+
         try:
             fetch_results = origin.fetch()
         except AssertionError:
             fetch_results = origin.fetch()
 
         new_objs = False
+
         for fetchinfo in fetch_results:
             if fetchinfo.old_commit is not None:
                 log.debug(
@@ -1781,7 +1786,7 @@ class Pygit2(GitProvider):
             return None
 
         try:
-            head_sha = self.peel(local_head).hex
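+            # pygit2 deprecated and later removed Object.hex; str(Object.id)
+            # is the supported way to obtain the SHA string.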
+            head_sha = str(self.peel(local_head).id)
         except AttributeError:
             # Shouldn't happen, but just in case a future pygit2 API change
             # breaks things, avoid a traceback and log an error.
@@ -1804,7 +1809,7 @@ class Pygit2(GitProvider):
                     # remote ref.
                     self.repo.checkout(checkout_ref)
                     if branch:
-                        self.repo.reset(oid, pygit2.GIT_RESET_HARD)
+                        self.repo.reset(pygit2_id, pygit2.GIT_RESET_HARD)
                 return True
             except GitLockError as exc:
                 if exc.errno == errno.EEXIST:
@@ -1833,14 +1838,17 @@ class Pygit2(GitProvider):
                 tag_ref = "refs/tags/" + tgt_ref
             if remote_ref in refs:
                 # Get commit id for the remote ref
-                oid = self.peel(self.repo.lookup_reference(remote_ref)).id
+                pygit2_id = self.peel(self.repo.lookup_reference(remote_ref)).id
                 if local_ref not in refs:
                     # No local branch for this remote, so create one and point
                     # it at the commit id of the remote ref
-                    self.repo.create_reference(local_ref, oid)
+                    self.repo.create_reference(local_ref, pygit2_id)
 
                 try:
-                    target_sha = self.peel(self.repo.lookup_reference(remote_ref)).hex
+                    target_sha = str(
+                        self.peel(self.repo.lookup_reference(remote_ref)).id
+                    )
+
                 except KeyError:
                     log.error(
                         "pygit2 was unable to get SHA for %s in %s remote '%s'",
@@ -1853,6 +1861,7 @@ class Pygit2(GitProvider):
 
                 # Only perform a checkout if HEAD and target are not pointing
                 # at the same SHA1.
+
                 if head_sha != target_sha:
                     # Check existence of the ref in refs/heads/ which
                     # corresponds to the local HEAD. Checking out local_ref
@@ -1868,7 +1877,8 @@ class Pygit2(GitProvider):
                     # cachedir).
                     head_ref = local_head.target
                     # If head_ref is not a string, it will point to a
-                    # pygit2.Oid object and we are in detached HEAD mode.
+                    # pygit2.Oid object (the oid/hex attributes are deprecated
+                    # in favor of id) and we are in detached HEAD mode.
                     # Therefore, there is no need to add a local reference. If
                     # head_ref == local_ref, then the local reference for HEAD
                     # in refs/heads/ already exists and again, no need to add.
@@ -1920,10 +1930,11 @@ class Pygit2(GitProvider):
                 else:
                     try:
                         # If no AttributeError raised, this is an annotated tag
-                        tag_sha = tag_obj.target.hex
+                        tag_sha = str(tag_obj.target.id)
+
                     except AttributeError:
                         try:
-                            tag_sha = tag_obj.hex
+                            tag_sha = str(tag_obj.id)
                         except AttributeError:
                             # Shouldn't happen, but could if a future pygit2
                             # API change breaks things.
@@ -2037,10 +2048,10 @@ class Pygit2(GitProvider):
             the empty directories within it in the "blobs" list
             """
             for entry in iter(tree):
-                if entry.oid not in self.repo:
+                if entry.id not in self.repo:
                     # Entry is a submodule, skip it
                     continue
-                blob = self.repo[entry.oid]
+                blob = self.repo[entry.id]
                 if not isinstance(blob, pygit2.Tree):
                     continue
                 blobs.append(
@@ -2059,8 +2070,8 @@ class Pygit2(GitProvider):
             return ret
         if self.root(tgt_env):
             try:
-                oid = tree[self.root(tgt_env)].oid
-                tree = self.repo[oid]
+                pygit2_id = tree[self.root(tgt_env)].id
+                tree = self.repo[pygit2_id]
             except KeyError:
                 return ret
             if not isinstance(tree, pygit2.Tree):
@@ -2105,6 +2116,7 @@ class Pygit2(GitProvider):
         origin = self.repo.remotes[0]
         refs_pre = self.repo.listall_references()
         fetch_kwargs = {}
+
         # pygit2 radically changed fetching in 0.23.2
         if self.remotecallbacks is not None:
             fetch_kwargs["callbacks"] = self.remotecallbacks
@@ -2118,6 +2130,7 @@ class Pygit2(GitProvider):
             pass
         try:
             fetch_results = origin.fetch(**fetch_kwargs)
+
         except GitError as exc:  # pylint: disable=broad-except
             exc_str = get_error_message(exc).lower()
             if "unsupported url protocol" in exc_str and isinstance(
@@ -2156,6 +2169,7 @@ class Pygit2(GitProvider):
             # pygit2.Remote.fetch() returns a class instance in
             # pygit2 >= 0.21.0
             received_objects = fetch_results.received_objects
+
         if received_objects != 0:
             log.debug(
                 "%s received %s objects for remote '%s'",
@@ -2167,6 +2181,7 @@ class Pygit2(GitProvider):
             log.debug("%s remote '%s' is up-to-date", self.role, self.id)
         refs_post = self.repo.listall_references()
         cleaned = self.clean_stale_refs(local_refs=refs_post)
+
         return True if (received_objects or refs_pre != refs_post or cleaned) else None
 
     def file_list(self, tgt_env):
@@ -2180,17 +2195,17 @@ class Pygit2(GitProvider):
             the file paths and symlink info in the "blobs" dict
             """
             for entry in iter(tree):
-                if entry.oid not in self.repo:
+                if entry.id not in self.repo:
                     # Entry is a submodule, skip it
                     continue
-                obj = self.repo[entry.oid]
+                obj = self.repo[entry.id]
                 if isinstance(obj, pygit2.Blob):
                     repo_path = salt.utils.path.join(
                         prefix, entry.name, use_posixpath=True
                     )
                     blobs.setdefault("files", []).append(repo_path)
                     if stat.S_ISLNK(tree[entry.name].filemode):
-                        link_tgt = self.repo[tree[entry.name].oid].data
+                        link_tgt = self.repo[tree[entry.name].id].data
                         blobs.setdefault("symlinks", {})[repo_path] = link_tgt
                 elif isinstance(obj, pygit2.Tree):
                     _traverse(
@@ -2209,8 +2224,8 @@ class Pygit2(GitProvider):
             try:
                 # This might need to be changed to account for a root that
                 # spans more than one directory
-                oid = tree[self.root(tgt_env)].oid
-                tree = self.repo[oid]
+                pygit2_id = tree[self.root(tgt_env)].id
+                tree = self.repo[pygit2_id]
             except KeyError:
                 return files, symlinks
             if not isinstance(tree, pygit2.Tree):
@@ -2263,12 +2278,12 @@ class Pygit2(GitProvider):
                     # path's object ID will be the target of the symlink. Follow
                     # the symlink and set path to the location indicated
                     # in the blob data.
-                    link_tgt = self.repo[entry.oid].data
+                    link_tgt = self.repo[entry.id].data
                     path = salt.utils.path.join(
                         os.path.dirname(path), link_tgt, use_posixpath=True
                     )
                 else:
-                    blob = self.repo[entry.oid]
+                    blob = self.repo[entry.id]
                     if isinstance(blob, pygit2.Tree):
                         # Path is a directory, not a file.
                         blob = None
@@ -2277,7 +2292,7 @@ class Pygit2(GitProvider):
                 blob = None
                 break
         if isinstance(blob, pygit2.Blob):
-            return blob, blob.hex, mode
+            return blob, str(blob.id), mode
         return None, None, None
 
     def get_tree_from_branch(self, ref):
@@ -3480,6 +3495,7 @@ class GitPillar(GitBase):
         """
         self.pillar_dirs = OrderedDict()
         self.pillar_linked_dirs = []
+
         for repo in self.remotes:
             cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail)
             if cachedir is not None:
diff --git a/salt/utils/nacl.py b/salt/utils/nacl.py
index cac3455d1a6..63d97e6f9ab 100644
--- a/salt/utils/nacl.py
+++ b/salt/utils/nacl.py
@@ -182,12 +182,12 @@ def keygen(sk_file=None, pk_file=None, **kwargs):
         with salt.utils.files.fopen(sk_file, "rb") as keyf:
             sk = salt.utils.stringutils.to_unicode(keyf.read()).rstrip("\n")
             sk = base64.b64decode(sk)
-        kp = nacl.public.PublicKey(sk)
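+        # The stored secret key must be loaded as a PrivateKey; the matching
+        # public key is derived from it below.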
+        kp = nacl.public.PrivateKey(sk)
         with salt.utils.files.fopen(pk_file, "wb") as keyf:
-            keyf.write(base64.b64encode(kp.encode()))
+            keyf.write(base64.b64encode(kp.public_key.encode()))
         return f"saved pk_file: {pk_file}"
 
-    kp = nacl.public.PublicKey.generate()
+    kp = nacl.public.PrivateKey.generate()
     with salt.utils.files.fopen(sk_file, "wb") as keyf:
         keyf.write(base64.b64encode(kp.encode()))
     if salt.utils.platform.is_windows():
@@ -200,7 +200,7 @@ def keygen(sk_file=None, pk_file=None, **kwargs):
         # chmod 0600 file
         os.chmod(sk_file, 1536)
     with salt.utils.files.fopen(pk_file, "wb") as keyf:
-        keyf.write(base64.b64encode(kp.encode()))
+        keyf.write(base64.b64encode(kp.public_key.encode()))
     return f"saved sk_file:{sk_file}  pk_file: {pk_file}"
 
 
diff --git a/salt/utils/network.py b/salt/utils/network.py
index b415f572750..69384819d3b 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -1740,7 +1740,13 @@ def _netlink_tool_remote_on(port, which_end):
             continue
         if which_end == "local_port" and int(local_port) != int(port):
             continue
-        remotes.add(remote_host.strip("[]"))
+
+        # Interpret IPv4-mapped IPv6 addresses as IPv4 (strip prefix)
+        remote_host = remote_host.strip("[]").lower()
+        if remote_host.startswith("::ffff:"):
+            remote_host = remote_host[7:]
+
+        remotes.add(remote_host)
 
     if valid is False:
         remotes = None
diff --git a/salt/utils/odict.py b/salt/utils/odict.py
index 2834f1d9246..11a3f3a3097 100644
--- a/salt/utils/odict.py
+++ b/salt/utils/odict.py
@@ -62,3 +62,8 @@ class DefaultOrderedDict(OrderedDict):
         return "DefaultOrderedDict({}, {})".format(
             self.default_factory, super().__repr__()
         )
+
+
+class HashableOrderedDict(OrderedDict):
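+    """
+    An OrderedDict that hashes by object identity, so instances can be used
+    where a hashable mapping is required. Equal instances do not hash equal;
+    hashing follows ``id()`` semantics.
+    """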
+    def __hash__(self):
+        return id(self)
diff --git a/salt/utils/path.py b/salt/utils/path.py
index eec79d7ef7b..81fb64e6734 100644
--- a/salt/utils/path.py
+++ b/salt/utils/path.py
@@ -203,7 +203,7 @@ def which(exe=None):
 
     # now to search through our system_path
     for path in system_path:
-        p = join(path, exe)
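+        # Expand any environment variables embedded in the PATH entry
+        # (e.g. %SystemRoot% on Windows) before joining with the exe name.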
+        p = join(os.path.expandvars(path), exe)
 
         # iterate through all extensions to see which one is executable
         for ext in pathext:
diff --git a/salt/utils/platform.py b/salt/utils/platform.py
index 100918b72d5..59a04b451bc 100644
--- a/salt/utils/platform.py
+++ b/salt/utils/platform.py
@@ -228,7 +228,11 @@ def is_aarch64():
     """
     Simple function to return if host is AArch64 or not
     """
-    return platform.machine().startswith("aarch64")
+    if is_darwin():
+        # macOS reports "arm64" rather than "aarch64" as Linux does
+        return platform.machine().startswith("arm64")
+    else:
+        return platform.machine().startswith("aarch64")
 
 
 def spawning_platform():
diff --git a/salt/utils/process.py b/salt/utils/process.py
index cdda5df02a5..3aa43e76ce6 100644
--- a/salt/utils/process.py
+++ b/salt/utils/process.py
@@ -1169,10 +1169,17 @@ class SubprocessList:
 
     def cleanup(self):
         with self.lock:
-            for proc in self.processes:
-                if proc.is_alive():
-                    continue
-                proc.join()
+            for proc in self.processes[:]:
+                proc.join(0.01)
+                if hasattr(proc, "exitcode"):
+                    # Only processes have exitcode and a close method, threads
+                    # do not.
+                    if proc.exitcode is None:
+                        continue
+                    proc.close()
+                else:
+                    if proc.is_alive():
+                        continue
                 self.processes.remove(proc)
                 self.count -= 1
                 log.debug("Subprocess %s cleaned up", proc.name)
diff --git a/salt/utils/saltclass.py b/salt/utils/saltclass.py
index 7d6fec7c578..c18d01af3d1 100644
--- a/salt/utils/saltclass.py
+++ b/salt/utils/saltclass.py
@@ -1,3 +1,4 @@
+import copy
 import glob
 import logging
 import os
@@ -5,6 +6,7 @@ import re
 
 from jinja2 import Environment, FileSystemLoader
 
+import salt.utils.odict
 import salt.utils.path
 import salt.utils.yaml
 
@@ -277,9 +279,27 @@ def expand_classes_glob(classes, salt_data):
     return expanded_classes
 
 
-def expand_classes_in_order(
-    minion_dict, salt_data, seen_classes, expanded_classes, classes_to_expand
-):
+def expand_classes_in_order(minion_dict, salt_data, seen_classes, classes_to_expand):
+    """
+    Expand the list of `classes_to_expand` and return them in the order found
+
+    The return order is `[C, B, A, M, L, MINION_ID]` when:
+
+    - the minion node includes classes `A` and `L`
+    - `A` includes class `B`
+    - `B` includes class `C`
+    - `L` includes classes `M` and `B` (`B` is already seen, so skipped)
+
+    :param dict minion_dict: definition of the minion node
+    :param dict salt_data: configuration data
+    :param iterable(str) seen_classes: classes already processed
+    :param iterable(str) classes_to_expand: classes to recursively expand
+    :return: Expanded classes in proper order
+    :rtype: salt.utils.odict.OrderedDict
+    """
+
+    expanded_classes = salt.utils.odict.OrderedDict()
+
     # Get classes to expand from minion dictionary
     if not classes_to_expand and "classes" in minion_dict:
         classes_to_expand = minion_dict["classes"]
@@ -290,71 +310,37 @@ def expand_classes_in_order(
     for klass in classes_to_expand:
         if klass not in seen_classes:
             seen_classes.append(klass)
-            expanded_classes[klass] = get_class(klass, salt_data)
+            klass_dict = salt.utils.odict.OrderedDict(
+                {klass: get_class(klass, salt_data)}
+            )
             # Fix corner case where class is loaded but doesn't contain anything
-            if expanded_classes[klass] is None:
-                expanded_classes[klass] = {}
+            if klass_dict[klass] is None:
+                klass_dict[klass] = {}
 
             # Merge newly found pillars into existing ones
-            new_pillars = expanded_classes[klass].get("pillars", {})
+            new_pillars = klass_dict[klass].get("pillars", {})
             if new_pillars:
                 dict_merge(salt_data["__pillar__"], new_pillars)
 
-            # Now replace class element in classes_to_expand by expansion
-            if expanded_classes[klass].get("classes"):
-                l_id = classes_to_expand.index(klass)
-                classes_to_expand[l_id:l_id] = expanded_classes[klass]["classes"]
-                expand_classes_in_order(
-                    minion_dict,
+            if "classes" in klass_dict[klass]:
+                nested_classes = expand_classes_in_order(
+                    {},
                     salt_data,
                     seen_classes,
-                    expanded_classes,
-                    classes_to_expand,
-                )
-            else:
-                expand_classes_in_order(
-                    minion_dict,
-                    salt_data,
-                    seen_classes,
-                    expanded_classes,
-                    classes_to_expand,
+                    klass_dict[klass].get("classes", {}),
                 )
 
-    # We may have duplicates here and we want to remove them
-    tmp = []
-    for t_element in classes_to_expand:
-        if t_element not in tmp:
-            tmp.append(t_element)
+                # Put current class after nested classes
+                klass_dict.update(nested_classes)
+                klass_dict.move_to_end(klass)
 
-    classes_to_expand = tmp
+            expanded_classes.update(klass_dict)
 
-    # Now that we've retrieved every class in order,
-    # let's return an ordered list of dicts
-    ord_expanded_classes = []
-    ord_expanded_states = []
-    for ord_klass in classes_to_expand:
-        ord_expanded_classes.append(expanded_classes[ord_klass])
-        # And be smart and sort out states list
-        # Address the corner case where states is empty in a class definition
-        if (
-            "states" in expanded_classes[ord_klass]
-            and expanded_classes[ord_klass]["states"] is None
-        ):
-            expanded_classes[ord_klass]["states"] = {}
+    # Minion dict must be at the end
+    if minion_dict:
+        expanded_classes.update({salt_data["minion_id"]: minion_dict})
 
-        if "states" in expanded_classes[ord_klass]:
-            ord_expanded_states.extend(expanded_classes[ord_klass]["states"])
-
-    # Add our minion dict as final element but check if we have states to process
-    if "states" in minion_dict and minion_dict["states"] is None:
-        minion_dict["states"] = []
-
-    if "states" in minion_dict:
-        ord_expanded_states.extend(minion_dict["states"])
-
-    ord_expanded_classes.append(minion_dict)
-
-    return ord_expanded_classes, classes_to_expand, ord_expanded_states
+    return expanded_classes
 
 
 def expanded_dict_from_minion(minion_id, salt_data):
@@ -377,22 +363,33 @@ def expanded_dict_from_minion(minion_id, salt_data):
         node_dict[minion_id] = {}
 
     # Merge newly found pillars into existing ones
-    dict_merge(salt_data["__pillar__"], node_dict[minion_id].get("pillars", {}))
+    dict_merge(
+        salt_data["__pillar__"], copy.deepcopy(node_dict[minion_id]).get("pillars", {})
+    )
 
-    # Get 2 ordered lists:
-    # expanded_classes: A list of all the dicts
-    # classes_list: List of all the classes
+    # Get an ordered mapping of class name -> class dict (minion dict last)
-    expanded_classes, classes_list, states_list = expand_classes_in_order(
-        node_dict[minion_id], salt_data, [], {}, []
-    )
+    expanded_classes = expand_classes_in_order(node_dict[minion_id], salt_data, [], [])
 
     # Here merge the pillars together
     pillars_dict = {}
-    for exp_dict in expanded_classes:
+    states_list = []
+    classes_list = list(expanded_classes.keys())[:-1]
+    classes_values = list(expanded_classes.values())
+    for exp_dict in classes_values:
         if "pillars" in exp_dict:
             dict_merge(pillars_dict, exp_dict)
+        if "states" in exp_dict:
+            states_list.extend(exp_dict["states"])
 
-    return expanded_classes, pillars_dict, classes_list, states_list
+    # Avoid duplicates, keep first
+    state_seen = set()
+    states_list = [
+        state
+        for state in states_list
+        if not (state in state_seen or state_seen.add(state))
+    ]
+
+    return classes_values, pillars_dict, classes_list, states_list
 
 
 def get_pillars(minion_id, salt_data):
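The `move_to_end()` calls above implement the ordering the docstring promises: nested classes are expanded first, then the including class is moved behind them. A minimal sketch of that behavior, using plain `collections.OrderedDict` and made-up include data rather than real saltclass input:

    from collections import OrderedDict

    def expand(klass, includes, seen, out):
        # `includes` maps class name -> included classes (hypothetical data)
        if klass in seen:
            return
        seen.add(klass)
        out[klass] = None
        for nested in includes.get(klass, []):
            expand(nested, includes, seen, out)
        out.move_to_end(klass)  # place the class after everything it includes

    includes = {"A": ["B"], "B": ["C"], "L": ["M", "B"]}
    result, seen = OrderedDict(), set()
    for top in ["A", "L"]:
        expand(top, includes, seen, result)
    print(list(result))  # ['C', 'B', 'A', 'M', 'L'] -- duplicates keep first slot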
diff --git a/salt/utils/systemd.py b/salt/utils/systemd.py
index 9b079ade0a0..df7509cd437 100644
--- a/salt/utils/systemd.py
+++ b/salt/utils/systemd.py
@@ -76,6 +76,33 @@ def offline(context=None):
     return ret
 
 
+def status(context=None):
+    """Return True if systemd status succeeds. When False, the system may have
+    been booted with systemd but systemd is in a degraded state.
+
+    .. versionadded:: 3006.0
+    """
+    contextkey = "salt.utils.systemd.status"
+    if isinstance(context, (dict, salt.loader.context.NamedLoaderContext)):
+        # Can't put this if block on the same line as the above if block,
+        # because it will break the elif below.
+        if contextkey in context:
+            return context[contextkey]
+    elif context is not None:
+        raise SaltInvocationError("context must be a dictionary if passed")
+    proc = subprocess.run(
+        ["systemctl", "status"],
+        check=False,
+        capture_output=True,
+    )
+    ret = (
+        b"Failed to get D-Bus connection: No such file or directory" not in proc.stderr
+    )
+    if context is not None:
+        context[contextkey] = ret
+    return ret
+
+
 def version(context=None):
     """
     Attempts to run systemctl --version. Returns None if unable to determine
@@ -123,7 +149,10 @@ def has_scope(context=None):
     _sd_version = version(context)
     if _sd_version is None:
         return False
-    return _sd_version >= 205
+    if status(context):
+        return _sd_version >= 205
+    else:
+        return False
 
 
 def pid_to_service(pid):
@@ -143,7 +172,10 @@ def _pid_to_service_systemctl(pid):
     systemd_cmd = ["systemctl", "--output", "json", "status", str(pid)]
     try:
         systemd_output = subprocess.run(
-            systemd_cmd, check=True, text=True, capture_output=True
+            systemd_cmd,
+            check=True,
+            text=True,
+            capture_output=True,
         )
         status_json = salt.utils.json.find_json(systemd_output.stdout)
     except (ValueError, subprocess.CalledProcessError):
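A usage sketch for the new `status()` probe, assuming Salt is importable as in the diff: the optional context dict caches the result, so `systemctl status` is spawned only on the first call.

    import salt.utils.systemd

    ctx = {}
    first = salt.utils.systemd.status(ctx)   # shells out to `systemctl status`
    second = salt.utils.systemd.status(ctx)  # answered from ctx, no subprocess
    assert first == second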
diff --git a/salt/utils/timed_subprocess.py b/salt/utils/timed_subprocess.py
index 627d3f712ed..c41d1a7377b 100644
--- a/salt/utils/timed_subprocess.py
+++ b/salt/utils/timed_subprocess.py
@@ -33,9 +33,8 @@ class TimedProc:
             if not self.stdin_raw_newlines:
                 # Translate a newline submitted as '\n' on the CLI to an actual
                 # newline character.
-                self.stdin = salt.utils.stringutils.to_bytes(
-                    self.stdin.replace("\\n", "\n")
-                )
+                self.stdin = self.stdin.replace("\\n", "\n")
+            self.stdin = salt.utils.stringutils.to_bytes(self.stdin)
             kwargs["stdin"] = subprocess.PIPE
 
         if not self.with_communicate:
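The reordering in `TimedProc` means stdin is now converted to bytes whether or not the newline translation ran. A tiny sketch of the two steps, using `str.encode()` as a stand-in for `salt.utils.stringutils.to_bytes()`:

    stdin = "line1\\nline2"              # literal backslash-n from the CLI
    stdin = stdin.replace("\\n", "\n")   # translate it to a real newline
    stdin = stdin.encode()               # subprocess.PIPE always gets bytes
    assert stdin == b"line1\nline2"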
diff --git a/salt/utils/verify.py b/salt/utils/verify.py
index 85d9e568390..947102135ad 100644
--- a/salt/utils/verify.py
+++ b/salt/utils/verify.py
@@ -532,7 +532,7 @@ def clean_path(root, path, subdir=False, realpath=True):
     Pass realpath=False if filesystem links should not be resolved.
     """
     if not os.path.isabs(root):
-        return ""
+        root = os.path.join(os.getcwd(), root)
     root = os.path.normpath(root)
     if not os.path.isabs(path):
         path = os.path.join(root, path)
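A hedged sketch of the behavioral change to `clean_path()`: a relative `root` is now anchored to the current working directory instead of short-circuiting to an empty string (paths here are illustrative).

    import os
    import salt.utils.verify

    # Before: a relative root made clean_path() return "" unconditionally.
    # After: the root is resolved against os.getcwd() and validated as usual.
    result = salt.utils.verify.clean_path("some/root", "file.txt")
    print(result)  # e.g. <cwd>/some/root/file.txt once it passes validation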
diff --git a/salt/utils/win_lgpo_netsh.py b/salt/utils/win_lgpo_netsh.py
index 6f54d4e25b7..c463061b88c 100644
--- a/salt/utils/win_lgpo_netsh.py
+++ b/salt/utils/win_lgpo_netsh.py
@@ -110,20 +110,38 @@ def _get_inbound_text(rule, action):
     The "Inbound connections" setting is a combination of 2 parameters:
 
     - AllowInboundRules
+      0 = False
+      1 = True
+      2 = NotConfigured
+      There appears to be no way to set "AllowInboundRules" outside of PowerShell
+
     - DefaultInboundAction
+      0 = Not Configured
+      2 = Allow Inbound
+      4 = Block Inbound
 
     The settings are as follows:
 
     Rules Action
+    0     4       BlockInboundAlways
+    1     0       NotConfigured
+    1     2       AllowInbound
+    1     4       BlockInbound
+    2     0       NotConfigured
     2     2       AllowInbound
     2     4       BlockInbound
-    0     4       BlockInboundAlways
-    2     0       NotConfigured
     """
     settings = {
         0: {
+            0: "NotConfigured",
+            2: "AllowInbound",
             4: "BlockInboundAlways",
         },
+        1: {
+            0: "NotConfigured",
+            2: "AllowInbound",
+            4: "BlockInbound",
+        },
         2: {
             0: "NotConfigured",
             2: "AllowInbound",
@@ -143,6 +161,29 @@ def _get_inbound_settings(text):
     return settings[text.lower()]
 
 
+def _get_all_settings(profile, store="local"):
+    # Get current settings using PowerShell
+    # if "lgpo.firewall_profile_settings" not in __context__:
+    cmd = ["Get-NetFirewallProfile"]
+    if profile:
+        cmd.append(profile)
+    if store.lower() == "lgpo":
+        cmd.extend(["-PolicyStore", "localhost"])
+
+    # Run the command and get dict
+    settings = salt.utils.win_pwsh.run_dict(cmd)
+
+    # A successful run should return a dictionary
+    if not settings:
+        raise CommandExecutionError("LGPO NETSH: An unknown error occurred")
+
+    # Remove the junk
+    for setting in list(settings.keys()):
+        if setting.startswith("Cim"):
+            settings.pop(setting)
+
+    return settings
+
+
 def get_settings(profile, section, store="local"):
     """
     Get the firewall property from the specified profile in the specified store
@@ -190,24 +232,7 @@ def get_settings(profile, section, store="local"):
     if store.lower() not in ("local", "lgpo"):
         raise ValueError(f"Incorrect store: {store}")
 
-    # Build the powershell command
-    cmd = ["Get-NetFirewallProfile"]
-    if profile:
-        cmd.append(profile)
-    if store and store.lower() == "lgpo":
-        cmd.extend(["-PolicyStore", "localhost"])
-
-    # Run the command
-    settings = salt.utils.win_pwsh.run_dict(cmd)
-
-    # A successful run should return a dictionary
-    if not settings:
-        raise CommandExecutionError("LGPO NETSH: An unknown error occurred")
-
-    # Remove the junk
-    for setting in list(settings.keys()):
-        if setting.startswith("Cim"):
-            settings.pop(setting)
+    settings = _get_all_settings(profile=profile, store=store)
 
     # Make it look like netsh output
     ret_settings = {
@@ -299,24 +324,7 @@ def get_all_settings(profile, store="local"):
     if store.lower() not in ("local", "lgpo"):
         raise ValueError(f"Incorrect store: {store}")
 
-    # Build the powershell command
-    cmd = ["Get-NetFirewallProfile"]
-    if profile:
-        cmd.append(profile)
-    if store and store.lower() == "lgpo":
-        cmd.extend(["-PolicyStore", "localhost"])
-
-    # Run the command
-    settings = salt.utils.win_pwsh.run_dict(cmd)
-
-    # A successful run should return a dictionary
-    if not settings:
-        raise CommandExecutionError("LGPO NETSH: An unknown error occurred")
-
-    # Remove the junk
-    for setting in list(settings.keys()):
-        if setting.startswith("Cim"):
-            settings.pop(setting)
+    settings = _get_all_settings(profile=profile, store=store)
 
     # Make it look like netsh output
     ret_settings = {
@@ -409,6 +417,9 @@ def set_firewall_settings(profile, inbound=None, outbound=None, store="local"):
         raise ValueError(f"Incorrect outbound value: {outbound}")
     if not inbound and not outbound:
         raise ValueError("Must set inbound or outbound")
+
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-allowinboundrules
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-defaultoutboundaction
     if store == "local":
         if inbound and inbound.lower() == "notconfigured":
             msg = "Cannot set local inbound policies as NotConfigured"
@@ -417,16 +428,26 @@ def set_firewall_settings(profile, inbound=None, outbound=None, store="local"):
             msg = "Cannot set local outbound policies as NotConfigured"
             raise CommandExecutionError(msg)
 
+    # Get current settings
+    settings = _get_all_settings(profile=profile, store=store)
+
     # Build the powershell command
     cmd = ["Set-NetFirewallProfile"]
     if profile:
         cmd.append(profile)
-    if store and store.lower() == "lgpo":
+    if store.lower() == "lgpo":
         cmd.extend(["-PolicyStore", "localhost"])
 
     # Get inbound settings
     if inbound:
         in_rule, in_action = _get_inbound_settings(inbound.lower())
+        # If the current AllowInboundRules is set (1 or 2) and the new value
+        # would be 2, keep the current setting. LGPO offers no way to set
+        # AllowInboundRules, and it does not appear in gpedit.msc either;
+        # PowerShell seems to be the only way to change it.
+        current_in_rule = settings["AllowInboundRules"]
+        if current_in_rule > 0 and in_rule == 2:
+            in_rule = current_in_rule
         cmd.extend(["-AllowInboundRules", in_rule, "-DefaultInboundAction", in_action])
 
     if outbound:
@@ -509,10 +530,6 @@ def set_logging_settings(profile, setting, value, store="local"):
     # Input validation
     if profile.lower() not in ("domain", "public", "private"):
         raise ValueError(f"Incorrect profile: {profile}")
-    if store == "local":
-        if str(value).lower() == "notconfigured":
-            msg = "Cannot set local policies as NotConfigured"
-            raise CommandExecutionError(msg)
     if setting.lower() not in (
         "allowedconnections",
         "droppedconnections",
@@ -520,6 +537,18 @@ def set_logging_settings(profile, setting, value, store="local"):
         "maxfilesize",
     ):
         raise ValueError(f"Incorrect setting: {setting}")
+
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-logallowed
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-logblocked
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-logmaxsizekilobytes
+    if str(value).lower() == "notconfigured" and store.lower() == "local":
+        if setting.lower() in ("allowedconnections", "droppedconnections", "maxfilesize"):
+            raise CommandExecutionError(
+                "NotConfigured only valid when setting Group Policy"
+            )
+    if setting.lower() == "maxfilesize" and str(value).lower() == "notconfigured":
+        raise CommandExecutionError(f"NotConfigured not a valid option for {setting}")
+
     settings = {"filename": ["-LogFileName", value]}
     if setting.lower() in ("allowedconnections", "droppedconnections"):
         if value.lower() not in ("enable", "disable", "notconfigured"):
@@ -588,7 +617,7 @@ def set_settings(profile, setting, value, store="local"):
 
             - enable
             - disable
-            - notconfigured
+            - notconfigured (lgpo only)
 
         store (str):
             The store to use. This is either the local firewall policy or the
@@ -618,20 +647,19 @@ def set_settings(profile, setting, value, store="local"):
         raise ValueError(f"Incorrect setting: {setting}")
     if value.lower() not in ("enable", "disable", "notconfigured"):
         raise ValueError(f"Incorrect value: {value}")
-    if setting.lower() in ["localfirewallrules", "localconsecrules"]:
-        if store.lower() != "lgpo":
-            msg = f"{setting} can only be set using Group Policy"
-            raise CommandExecutionError(msg)
-    if setting.lower() == "inboundusernotification" and store.lower() != "lgpo":
-        if value.lower() == "notconfigured":
-            msg = "NotConfigured is only valid when setting group policy"
-            raise CommandExecutionError(msg)
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-allowlocalfirewallrules
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-allowlocalipsecrules
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-allowunicastresponsetomulticast
+    # https://learn.microsoft.com/en-us/powershell/module/netsecurity/set-netfirewallprofile?view=windowsserver2025-ps#-notifyonlisten
+    if value.lower() == "notconfigured" and store.lower() == "local":
+        msg = "NotConfigured is only valid when setting group policy"
+        raise CommandExecutionError(msg)
 
     # Build the powershell command
     cmd = ["Set-NetFirewallProfile"]
     if profile:
         cmd.append(profile)
-    if store and store.lower() == "lgpo":
+    if store.lower() == "lgpo":
         cmd.extend(["-PolicyStore", "localhost"])
 
     settings = {
@@ -706,7 +734,7 @@ def set_state(profile, state, store="local"):
     cmd = ["Set-NetFirewallProfile"]
     if profile:
         cmd.append(profile)
-    if store and store.lower() == "lgpo":
+    if store.lower() == "lgpo":
         cmd.extend(["-PolicyStore", "localhost"])
 
     cmd.extend(["-Enabled", ON_OFF[state.lower()]])
diff --git a/salt/utils/win_update.py b/salt/utils/win_update.py
index dd54f213963..1c93c5b18ea 100644
--- a/salt/utils/win_update.py
+++ b/salt/utils/win_update.py
@@ -528,14 +528,18 @@ class WindowsUpdateAgent:
         found = updates.updates
 
         for update in self._updates:
+            # Some update objects can be empty or undefined; they surface
+            # here as an AttributeError when the attributes below are missing
+            try:
+                if salt.utils.data.is_true(update.IsHidden) and skip_hidden:
+                    continue
 
-            if salt.utils.data.is_true(update.IsHidden) and skip_hidden:
-                continue
+                if salt.utils.data.is_true(update.IsInstalled) and skip_installed:
+                    continue
 
-            if salt.utils.data.is_true(update.IsInstalled) and skip_installed:
-                continue
-
-            if salt.utils.data.is_true(update.IsMandatory) and skip_mandatory:
+                if salt.utils.data.is_true(update.IsMandatory) and skip_mandatory:
+                    continue
+            except AttributeError:
                 continue
 
             # Windows 10 build 2004 introduced some problems with the
diff --git a/salt/utils/yamldumper.py b/salt/utils/yamldumper.py
index 8c6e40394a3..8e694ab4a76 100644
--- a/salt/utils/yamldumper.py
+++ b/salt/utils/yamldumper.py
@@ -13,7 +13,7 @@ import collections
 import yaml  # pylint: disable=blacklisted-import
 
 import salt.utils.context
-from salt.utils.odict import OrderedDict
+from salt.utils.odict import HashableOrderedDict, OrderedDict
 
 try:
     from yaml import CDumper as Dumper
@@ -71,7 +71,9 @@ def represent_undefined(dumper, data):
 
 
 OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
+OrderedDumper.add_representer(HashableOrderedDict, represent_ordereddict)
 SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
+SafeOrderedDumper.add_representer(HashableOrderedDict, represent_ordereddict)
 SafeOrderedDumper.add_representer(None, represent_undefined)
 
 OrderedDumper.add_representer(
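A usage sketch for the representers registered above, assuming Salt is importable: with `SafeOrderedDumper`, a `HashableOrderedDict` now dumps as a plain YAML mapping instead of falling through to `represent_undefined`.

    import yaml

    from salt.utils.odict import HashableOrderedDict
    from salt.utils.yamldumper import SafeOrderedDumper

    data = HashableOrderedDict([("a", 1), ("b", 2)])
    print(yaml.dump(data, Dumper=SafeOrderedDumper))
    # a: 1
    # b: 2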
diff --git a/salt/version.py b/salt/version.py
index e52cf35c0d8..c8209d5e39f 100644
--- a/salt/version.py
+++ b/salt/version.py
@@ -80,7 +80,7 @@ class SaltVersionsInfo(type):
     SILICON       = SaltVersion("Silicon"      , info=3004,       released=True)
     PHOSPHORUS    = SaltVersion("Phosphorus"   , info=3005,       released=True)
     SULFUR        = SaltVersion("Sulfur"       , info=3006,       released=True)
-    CHLORINE      = SaltVersion("Chlorine"     , info=3007)
+    CHLORINE      = SaltVersion("Chlorine"     , info=3007,       released=True)
     ARGON         = SaltVersion("Argon"        , info=3008)
     POTASSIUM     = SaltVersion("Potassium"    , info=3009)
     CALCIUM       = SaltVersion("Calcium"      , info=3010)
diff --git a/tests/conftest.py b/tests/conftest.py
index 1d283a8bbc9..edf27fc5c20 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -16,6 +16,7 @@ import _pytest.logging
 import _pytest.skipping
 import more_itertools
 import pytest
+import pytestskipmarkers
 
 import salt
 import salt._logging
@@ -426,7 +427,8 @@ def pytest_itemcollected(item):
             pytest.fail(
                 "The test {!r} appears to be written for pytest but it's not under"
                 " {!r}. Please move it there.".format(
-                    item.nodeid, str(PYTESTS_DIR.relative_to(CODE_DIR)), pytrace=False
+                    item.nodeid,
+                    str(PYTESTS_DIR.relative_to(CODE_DIR)),
                 )
             )
 
@@ -801,6 +803,12 @@ def salt_factories_default_root_dir(salt_factories_default_root_dir):
         dictionary, then that's the value used, and not the one returned by
         this fixture.
     """
+    if os.environ.get("CI") and pytestskipmarkers.utils.platform.is_windows():
+        tempdir = pathlib.Path(
+            os.environ.get("RUNNER_TEMP", r"C:\Windows\Temp")
+        ).resolve()
+        return tempdir / "stsuite"
+
     return salt_factories_default_root_dir / "stsuite"
 
 
diff --git a/tests/filename_map.yml b/tests/filename_map.yml
index ed834192486..620135b7c25 100644
--- a/tests/filename_map.yml
+++ b/tests/filename_map.yml
@@ -133,7 +133,7 @@ salt/engines/*:
   - pytests.unit.engines.test_engines
 
 salt/grains/*:
-  - integration.grains.test_custom
+  - pytests.integration.grains.test_custom
 
 salt/matchers/*:
   - integration.states.test_match
diff --git a/tests/integration/grains/test_custom.py b/tests/integration/grains/test_custom.py
deleted file mode 100644
index d99e88d1902..00000000000
--- a/tests/integration/grains/test_custom.py
+++ /dev/null
@@ -1,23 +0,0 @@
-"""
-Test the core grains
-"""
-
-import pytest
-
-from tests.support.case import ModuleCase
-
-
-@pytest.mark.windows_whitelisted
-class TestGrainsCore(ModuleCase):
-    """
-    Test the core grains grains
-    """
-
-    @pytest.mark.slow_test
-    def test_grains_passed_to_custom_grain(self):
-        """
-        test if current grains are passed to grains module functions that have a grains argument
-        """
-        self.assertEqual(
-            self.run_function("grains.get", ["custom_grain_test"]), "itworked"
-        )
diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py
index ef88590a9d5..31cf8a18144 100644
--- a/tests/integration/modules/test_cp.py
+++ b/tests/integration/modules/test_cp.py
@@ -231,12 +231,15 @@ class CPModuleTest(ModuleCase):
         """
         cp.get_url with https:// source given
         """
-        self.run_function("cp.get_url", ["https://repo.saltproject.io/index.html", tgt])
+        self.run_function(
+            "cp.get_url",
+            ["https://packages.broadcom.com/artifactory/saltproject-generic/", tgt],
+        )
         with salt.utils.files.fopen(tgt, "r") as instructions:
             data = salt.utils.stringutils.to_unicode(instructions.read())
-        self.assertIn("Salt Project", data)
-        self.assertIn("Package", data)
-        self.assertIn("Repo", data)
+        self.assertIn("Index of saltproject", data)
+        self.assertIn("onedir", data)
+        self.assertIn("Artifactory Online Server", data)
         self.assertNotIn("AYBABTU", data)
 
     @pytest.mark.slow_test
@@ -245,14 +248,15 @@ class CPModuleTest(ModuleCase):
         cp.get_url with https:// source given and destination omitted.
         """
         ret = self.run_function(
-            "cp.get_url", ["https://repo.saltproject.io/index.html"]
+            "cp.get_url",
+            ["https://packages.broadcom.com/artifactory/saltproject-generic/"],
         )
 
         with salt.utils.files.fopen(ret, "r") as instructions:
             data = salt.utils.stringutils.to_unicode(instructions.read())
-        self.assertIn("Salt Project", data)
-        self.assertIn("Package", data)
-        self.assertIn("Repo", data)
+        self.assertIn("Index of saltproject", data)
+        self.assertIn("onedir", data)
+        self.assertIn("Artifactory Online Server", data)
         self.assertNotIn("AYBABTU", data)
 
     @pytest.mark.slow_test
@@ -266,16 +270,19 @@ class CPModuleTest(ModuleCase):
         tgt = None
         while time.time() - start <= timeout:
             ret = self.run_function(
-                "cp.get_url", ["https://repo.saltproject.io/index.html", tgt]
+                "cp.get_url",
+                ["https://packages.broadcom.com/artifactory/saltproject-generic/", tgt],
             )
             if ret.find("HTTP 599") == -1:
                 break
             time.sleep(sleep)
         if ret.find("HTTP 599") != -1:
-            raise Exception("https://repo.saltproject.io/index.html returned 599 error")
-        self.assertIn("Salt Project", ret)
-        self.assertIn("Package", ret)
-        self.assertIn("Repo", ret)
+            raise Exception(
+                "https://packages.broadcom.com/artifactory/saltproject-generic/ returned 599 error"
+            )
+        self.assertIn("Index of saltproject", ret)
+        self.assertIn("onedir", ret)
+        self.assertIn("Artifactory Online Server", ret)
         self.assertNotIn("AYBABTU", ret)
 
     @pytest.mark.slow_test
@@ -344,11 +351,11 @@ class CPModuleTest(ModuleCase):
         """
         cp.get_file_str with https:// source given
         """
-        src = "https://repo.saltproject.io/index.html"
+        src = "https://packages.broadcom.com/artifactory/saltproject-generic/"
         ret = self.run_function("cp.get_file_str", [src])
-        self.assertIn("Salt Project", ret)
-        self.assertIn("Package", ret)
-        self.assertIn("Repo", ret)
+        self.assertIn("Index of saltproject", ret)
+        self.assertIn("onedir", ret)
+        self.assertIn("Artifactory Online Server", ret)
         self.assertNotIn("AYBABTU", ret)
 
     @pytest.mark.slow_test
diff --git a/tests/integration/modules/test_file.py b/tests/integration/modules/test_file.py
index 98ad9f3af45..1d352778c34 100644
--- a/tests/integration/modules/test_file.py
+++ b/tests/integration/modules/test_file.py
@@ -7,6 +7,7 @@ import pytest
 
 import salt.utils.files
 import salt.utils.platform
+import salt.utils.stringutils
 from tests.support.case import ModuleCase
 from tests.support.helpers import requires_system_grains
 from tests.support.runtests import RUNTIME_VARS
@@ -199,7 +200,7 @@ class FileModuleTest(ModuleCase):
         assert ret["retcode"] == 0, repr(ret)
         with salt.utils.files.fopen(src_file) as fp:
             self.assertEqual(
-                salt.utils.stringutils.to_unicode(fp.read()), "Hello world\n"
+                salt.utils.stringutils.to_unicode(fp.read()), f"Hello world{os.linesep}"
             )
 
     def test_remove_file(self):
diff --git a/tests/integration/modules/test_linux_shadow.py b/tests/integration/modules/test_linux_shadow.py
index c922cb2f618..e65b73b3540 100644
--- a/tests/integration/modules/test_linux_shadow.py
+++ b/tests/integration/modules/test_linux_shadow.py
@@ -52,6 +52,7 @@ class ShadowModuleTest(ModuleCase):
 
     @pytest.mark.destructive_test
     @pytest.mark.slow_test
+    @pytest.mark.skip_if_binaries_missing("passwd")
     def test_del_password(self):
         """
         Test shadow.del_password
@@ -61,8 +62,9 @@ class ShadowModuleTest(ModuleCase):
 
         # Correct Functionality
         self.assertTrue(self.run_function("shadow.del_password", [self._test_user]))
-        self.assertEqual(
-            self.run_function("shadow.info", [self._test_user])["passwd"], ""
+        self.assertIn(
+            self.run_function("shadow.info", [self._test_user])["passwd"],
+            ["", "!", "!!"],
         )
 
         # User does not exist
diff --git a/tests/integration/modules/test_localemod.py b/tests/integration/modules/test_localemod.py
index 7130895bafb..5a59e84e49a 100644
--- a/tests/integration/modules/test_localemod.py
+++ b/tests/integration/modules/test_localemod.py
@@ -1,13 +1,27 @@
+import subprocess
+
 import pytest
 
+import salt.utils.platform
 from tests.support.case import ModuleCase
 
 
+def _check_systemctl():
+    if not hasattr(_check_systemctl, "memo"):
+        if not salt.utils.platform.is_linux():
+            _check_systemctl.memo = False
+        else:
+            proc = subprocess.run(["localectl"], capture_output=True, check=False)
+            _check_systemctl.memo = b"No such file or directory" in proc.stderr
+    return _check_systemctl.memo
+
+
 @pytest.mark.skip_on_windows(reason="minion is windows")
 @pytest.mark.skip_on_darwin(reason="locale method is not supported on mac")
 @pytest.mark.skip_on_freebsd(
     reason="locale method is supported only within login classes or environment variables"
 )
+@pytest.mark.skipif(_check_systemctl(), reason="localectl degraded")
 @pytest.mark.requires_salt_modules("locale")
 @pytest.mark.windows_whitelisted
 class LocaleModuleTest(ModuleCase):
@@ -50,6 +64,7 @@ class LocaleModuleTest(ModuleCase):
 
     @pytest.mark.destructive_test
     @pytest.mark.slow_test
+    @pytest.mark.skipif(_check_systemctl(), reason="systemd degraded")
     def test_set_locale(self):
         original_locale = self.run_function("locale.get_locale")
         locale_to_set = self._find_new_locale(original_locale)
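Several test modules in this change add a near-identical `_check_systemctl()` helper. The shared idiom, sketched generically below, caches the probe result as an attribute on the function itself so the subprocess runs at most once per session (this sketch mirrors the `localectl` probe above and assumes a Linux box with the systemd tooling installed):

    import subprocess

    def _probe_degraded():
        if not hasattr(_probe_degraded, "memo"):
            # First call pays for the subprocess; later calls reuse the answer.
            proc = subprocess.run(["localectl"], capture_output=True, check=False)
            _probe_degraded.memo = b"No such file or directory" in proc.stderr
        return _probe_degraded.memo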
diff --git a/tests/integration/modules/test_timezone.py b/tests/integration/modules/test_timezone.py
index 3af9bf628ec..c9894c61089 100644
--- a/tests/integration/modules/test_timezone.py
+++ b/tests/integration/modules/test_timezone.py
@@ -4,8 +4,11 @@ Integration tests for timezone module
 Linux and Solaris are supported
 """
 
+import subprocess
+
 import pytest
 
+import salt.utils.platform
 from tests.support.case import ModuleCase
 
 try:
@@ -16,6 +19,17 @@ except ImportError:
     HAS_TZLOCAL = False
 
 
+def _check_systemctl():
+    if not hasattr(_check_systemctl, "memo"):
+        if not salt.utils.platform.is_linux():
+            _check_systemctl.memo = False
+        else:
+            proc = subprocess.run(["timedatectl"], capture_output=True, check=False)
+            _check_systemctl.memo = b"No such file or directory" in proc.stderr
+    return _check_systemctl.memo
+
+
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 class TimezoneLinuxModuleTest(ModuleCase):
     def setUp(self):
         """
@@ -32,6 +46,7 @@ class TimezoneLinuxModuleTest(ModuleCase):
         self.assertIn(ret, timescale)
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 class TimezoneSolarisModuleTest(ModuleCase):
     def setUp(self):
         """
diff --git a/tests/pytests/functional/channel/test_server.py b/tests/pytests/functional/channel/test_server.py
index cd2a828e41e..32f71068ac0 100644
--- a/tests/pytests/functional/channel/test_server.py
+++ b/tests/pytests/functional/channel/test_server.py
@@ -59,6 +59,8 @@ def transport_ids(value):
 @pytest.fixture(
     params=[
         "ws",
+        "tcp",
+        "zeromq",
     ],
     ids=transport_ids,
 )
@@ -173,6 +175,8 @@ def test_pub_server_channel(
         master_config,
     )
     server_channel.pre_fork(process_manager)
+    if not server_channel.transport.started.wait(30):
+        pytest.fail("Server channel did not start within 30 seconds.")
     req_server_channel = salt.channel.server.ReqServerChannel.factory(master_config)
     req_server_channel.pre_fork(process_manager)
 
diff --git a/tests/pytests/functional/cli/test_salt.py b/tests/pytests/functional/cli/test_salt.py
index c91655f8aff..8de8cc9d374 100644
--- a/tests/pytests/functional/cli/test_salt.py
+++ b/tests/pytests/functional/cli/test_salt.py
@@ -99,9 +99,6 @@ def test_help_log(salt_cli):
     """
     ret = salt_cli.run("--help")
-    count = 0
-    stdout = ret.stdout.split("\n")
-    for line in stdout:
-        if "sensitive data:" in line:
-            count += 1
-            assert line.strip() == "sensitive data: all, debug, garbage, profile, trace"
-    assert count == 2
+    # The output wrapping can depend on the COLUMNS environment variable
+    assert "sensitive data: all, debug, garbage, profile, trace" in " ".join(
+        ret.stdout.split()
+    )
diff --git a/tests/pytests/functional/conftest.py b/tests/pytests/functional/conftest.py
index 2fb2246b633..0a8219b8f71 100644
--- a/tests/pytests/functional/conftest.py
+++ b/tests/pytests/functional/conftest.py
@@ -1,5 +1,6 @@
 import logging
 import shutil
+import sys
 
 import pytest
 from saltfactories.utils.functional import Loaders
@@ -70,6 +71,17 @@ def minion_opts(
             },
         }
     )
+
+    if sys.platform.startswith("win"):
+        # We need to set up winrepo on Windows
+        minion_config_overrides.update(
+            {
+                "winrepo_source_dir": "salt://winrepo_ng",
+                "winrepo_dir_ng": str(state_tree / "winrepo_ng"),
+                "winrepo_dir": str(state_tree / "winrepo"),
+            }
+        )
+
     factory = salt_factories.salt_minion_daemon(
         minion_id,
         defaults=minion_config_defaults or None,
diff --git a/tests/pytests/functional/formulas/test_nginx.py b/tests/pytests/functional/formulas/test_nginx.py
index d0efce1c75c..7c1541c9b68 100644
--- a/tests/pytests/functional/formulas/test_nginx.py
+++ b/tests/pytests/functional/formulas/test_nginx.py
@@ -6,6 +6,8 @@ import types
 
 import pytest
 
+from tests.pytests.functional.states.test_service import _check_systemctl
+
 pytestmark = [
     pytest.mark.skip_on_windows,
     pytest.mark.destructive_test,
@@ -22,6 +24,7 @@ def formula():
     return types.SimpleNamespace(name="nginx-formula", tag="2.8.1")
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_formula(modules):
     ret = modules.state.sls("nginx")
     assert not ret.errors
diff --git a/tests/pytests/functional/loader/test_dunder.py b/tests/pytests/functional/loader/test_dunder.py
new file mode 100644
index 00000000000..581d669a12f
--- /dev/null
+++ b/tests/pytests/functional/loader/test_dunder.py
@@ -0,0 +1,50 @@
+import salt.loader.context
+import salt.loader.lazy
+import salt.utils.files
+import tests.support.helpers
+
+
+def test_opts_dunder_opts_without_import(tmp_path):
+    """
+    Test __opts__ without being imported.
+
+    When a loaded module uses __opts__ but does not import it from
+    salt.loader.dunder the __opts__ object will be a dictionary.
+    """
+    opts = {"optimization_order": [0, 1, 2]}
+    with salt.utils.files.fopen(tmp_path / "mymod.py", "w") as fp:
+        fp.write(
+            tests.support.helpers.dedent(
+                """
+            def mymethod():
+                return __opts__
+            """
+            )
+        )
+    loader = salt.loader.lazy.LazyLoader([tmp_path], opts)
+    assert type(loader["mymod.mymethod"]()) == dict
+
+
+def test_opts_dunder_opts_with_import(tmp_path):
+    """
+    Test __opts__ when imported.
+
+    When a loaded module uses __opts__ by importing it from
+    salt.loader.dunder the __opts__ object will be a NamedLoaderContext.
+    """
+    opts = {"optimization_order": [0, 1, 2]}
+    with salt.utils.files.fopen(tmp_path / "mymod.py", "w") as fp:
+        fp.write(
+            tests.support.helpers.dedent(
+                """
+            from salt.loader.dunder import __opts__
+            def optstype():
+                return type(__opts__)
+            def opts():
+                return __opts__
+            """
+            )
+        )
+    loader = salt.loader.lazy.LazyLoader([tmp_path], opts)
+    assert loader["mymod.optstype"]() == salt.loader.context.NamedLoaderContext
+    assert loader["mymod.opts"]() == opts
diff --git a/tests/pytests/functional/loader/test_loader.py b/tests/pytests/functional/loader/test_loader.py
index f34ca2239cf..2ce19d2a2b6 100644
--- a/tests/pytests/functional/loader/test_loader.py
+++ b/tests/pytests/functional/loader/test_loader.py
@@ -23,7 +23,9 @@ def salt_extension(tmp_path_factory):
 
 @pytest.fixture
 def venv(tmp_path):
-    with SaltVirtualEnv(venv_dir=tmp_path / ".venv") as _venv:
+    with SaltVirtualEnv(
+        venv_dir=tmp_path / ".venv", system_site_packages=True
+    ) as _venv:
         yield _venv
 
 
diff --git a/tests/pytests/functional/modules/cmd/test_script.py b/tests/pytests/functional/modules/cmd/test_script.py
index c272835f0bf..dcdd632fa70 100644
--- a/tests/pytests/functional/modules/cmd/test_script.py
+++ b/tests/pytests/functional/modules/cmd/test_script.py
@@ -57,7 +57,7 @@ def test_windows_script_args_powershell(cmd, shell, issue_56195):
     )
     script = "salt://issue-56195/test.ps1"
 
-    ret = cmd.script(source=script, args=args, shell="powershell", saltenv="base")
+    ret = cmd.script(source=script, args=args, shell=shell, saltenv="base")
 
     assert ret["stdout"] == password
 
@@ -78,7 +78,7 @@ def test_windows_script_args_powershell_runas(cmd, shell, account, issue_56195):
     ret = cmd.script(
         source=script,
         args=args,
-        shell="powershell",
+        shell=shell,
         saltenv="base",
         runas=account.username,
         password=account.password,
diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py
index 7a72ba7f530..0acad0aad3d 100644
--- a/tests/pytests/functional/modules/state/test_jinja_filters.py
+++ b/tests/pytests/functional/modules/state/test_jinja_filters.py
@@ -385,8 +385,10 @@ def _filter_id(value):
             sls=r"""
             {% if grains['os'] == 'Windows' %}
               {% set result = 'c:\Windows\System32\cmd.exe' | is_bin_file() %}
+            {% elif grains['os_family'] == 'Debian' %}
+              {% set result = '/usr/bin/file' | is_bin_file() %}
             {% else %}
-              {% set result = '/bin/ls' | is_bin_file() %}
+              {% set result = '/bin/file' | is_bin_file() %}
             {% endif %}
             test:
               module.run:
diff --git a/tests/pytests/functional/modules/state/test_state.py b/tests/pytests/functional/modules/state/test_state.py
index 32b811be1dd..a800c5c4a5d 100644
--- a/tests/pytests/functional/modules/state/test_state.py
+++ b/tests/pytests/functional/modules/state/test_state.py
@@ -3,6 +3,7 @@ import os
 import textwrap
 import threading
 import time
+from textwrap import dedent
 
 import pytest
 
@@ -1086,3 +1087,97 @@ def test_state_sls_mock_ret(state_tree):
             ret["cmd_|-echo1_|-echo 'This is a test!'_|-run"]["comment"]
             == "Not called, mocked"
         )
+
+
+@pytest.fixture
+def _state_requires_env(loaders, state_tree):
+    mod_contents = dedent(
+        r"""
+        def test_it(name):
+            return {
+                "name": name,
+                "result": __env__ == "base",
+                "comment": "",
+                "changes": {},
+            }
+        """
+    )
+    sls = "test_spawning"
+    sls_contents = dedent(
+        """
+        This should not fail on spawning platforms:
+          requires_env.test_it:
+            - name: foo
+            - parallel: true
+        """
+    )
+    with pytest.helpers.temp_file(
+        f"{sls}.sls", sls_contents, state_tree
+    ), pytest.helpers.temp_file("_states/requires_env.py", mod_contents, state_tree):
+        res = loaders.modules.saltutil.sync_states()
+        assert "states.requires_env" in res
+        yield sls
+
+
+def test_state_apply_parallel_spawning_with_global_dunders(state, _state_requires_env):
+    """
+    Ensure state modules called via `parallel: true` have access to injected
+    global dunders like `__env__`.
+    """
+    ret = state.apply(_state_requires_env)
+    assert (
+        ret[
+            "requires_env_|-This should not fail on spawning platforms_|-foo_|-test_it"
+        ]["result"]
+        is True
+    )
+
+
+@pytest.fixture
+def _state_unpicklable_ctx(loaders, state_tree):
+    mod_contents = dedent(
+        r"""
+        import threading
+
+        class Unpicklable:
+            def __init__(self):
+                self._lock = threading.RLock()
+
+        def test_it():
+            __context__["booh"] = Unpicklable()
+        """
+    )
+    sls = "test_spawning_unpicklable"
+    sls_contents = dedent(
+        r"""
+        {%- do salt["unpicklable.test_it"]() %}
+
+        This should not fail on spawning platforms:
+          test.nop:
+            - name: foo
+            - parallel: true
+        """
+    )
+    with pytest.helpers.temp_file(
+        f"{sls}.sls", sls_contents, state_tree
+    ), pytest.helpers.temp_file("_modules/unpicklable.py", mod_contents, state_tree):
+        res = loaders.modules.saltutil.sync_modules()
+        assert "modules.unpicklable" in res
+        yield sls
+
+
+@pytest.mark.skip_unless_on_spawning_platform(
+    reason="Pickling is only relevant on spawning platforms"
+)
+def test_state_apply_parallel_spawning_with_unpicklable_context(
+    state, _state_unpicklable_ctx
+):
+    """
+    Ensure that if the __context__ dictionary contains unpicklable objects,
+    they are filtered out instead of causing a crash.
+    """
+    ret = state.apply(_state_unpicklable_ctx)
+    assert (
+        ret["test_|-This should not fail on spawning platforms_|-foo_|-nop"]["result"]
+        is True
+    )
diff --git a/tests/pytests/functional/modules/test_aptpkg.py b/tests/pytests/functional/modules/test_aptpkg.py
index 161e3c7827e..612ee7717db 100644
--- a/tests/pytests/functional/modules/test_aptpkg.py
+++ b/tests/pytests/functional/modules/test_aptpkg.py
@@ -14,6 +14,7 @@ import salt.modules.gpg as gpg
 import salt.modules.pkg_resource as pkg_resource
 import salt.utils.files
 import salt.utils.stringutils
+from salt.loader.dunder import __opts__
 from tests.support.mock import Mock, patch
 
 pytestmark = [
@@ -86,7 +87,7 @@ def configure_loader_modules(minion_opts, grains):
         },
         gpg: {},
         cp: {
-            "__opts__": minion_opts,
+            "__opts__": __opts__.with_default(minion_opts),
         },
         config: {
             "__opts__": minion_opts,
@@ -99,6 +100,9 @@ def configure_loader_modules(minion_opts, grains):
 
 @pytest.fixture()
 def revert_repo_file(tmp_path):
+    sources = pathlib.Path("/etc/apt/sources.list")
+    if not sources.exists():
+        sources.touch()
     try:
         repo_file = pathlib.Path("/etc") / "apt" / "sources.list"
         backup = tmp_path / "repo_backup"
@@ -116,7 +120,7 @@ def build_repo_file():
     source_path = "/etc/apt/sources.list.d/source_test_list.list"
     try:
         test_repos = [
-            "deb [signed-by=/etc/apt/keyrings/salt-archive-keyring-2023.gpg arch=amd64] https://repo.saltproject.io/salt/py3/ubuntu/22.04/amd64/latest jammy main",
+            "deb [signed-by=/etc/apt/keyrings/salt-archive-keyring-2023.gpg arch=amd64] https://packages.broadcom.com/artifactory/saltproject-deb/ jammy main",
             "deb http://dist.list stable/all/",
         ]
         with salt.utils.files.fopen(source_path, "w+") as fp:
diff --git a/tests/pytests/functional/modules/test_network.py b/tests/pytests/functional/modules/test_network.py
index ab6aef879e5..a05006bccd7 100644
--- a/tests/pytests/functional/modules/test_network.py
+++ b/tests/pytests/functional/modules/test_network.py
@@ -4,6 +4,8 @@ Validate network module
 
 import pytest
 
+import salt.utils.platform
+
 pytestmark = [
     pytest.mark.windows_whitelisted,
     pytest.mark.requires_network,
@@ -12,7 +14,7 @@ pytestmark = [
 
 @pytest.fixture(scope="module")
 def url(modules):
-    return "rewrite.amazon.com"
+    return "ns.google.com"
 
 
 @pytest.fixture(scope="module")
@@ -26,9 +28,18 @@ def test_network_ping(network, url):
     network.ping
     """
     ret = network.ping(url)
-    exp_out = ["ping", url, "ms", "time"]
-    for out in exp_out:
-        assert out in ret.lower()
+
+    # GitHub-hosted runners are on Azure, which doesn't allow ping
+    packet_loss = "100% packet loss"
+    if salt.utils.platform.is_windows():
+        packet_loss = "100% loss"
+
+    if packet_loss not in ret.lower():
+        exp_out = ["ping", url, "ms", "time"]
+        for out in exp_out:
+            assert out in ret.lower()
+    else:
+        assert packet_loss in ret.lower()
 
 
 @pytest.mark.skip_on_darwin(reason="Not supported on macOS")
diff --git a/tests/pytests/functional/modules/test_pkg.py b/tests/pytests/functional/modules/test_pkg.py
index 4899851efb2..c46b751a88a 100644
--- a/tests/pytests/functional/modules/test_pkg.py
+++ b/tests/pytests/functional/modules/test_pkg.py
@@ -135,12 +135,8 @@ def test_mod_del_repo(grains, modules):
         elif grains["os_family"] == "RedHat":
             repo = "saltstack"
             name = "SaltStack repo for RHEL/CentOS {}".format(grains["osmajorrelease"])
-            baseurl = "https://repo.saltproject.io/py3/redhat/{}/x86_64/latest/".format(
-                grains["osmajorrelease"]
-            )
-            gpgkey = "https://repo.saltproject.io/py3/redhat/{}/x86_64/latest/SALTSTACK-GPG-KEY.pub".format(
-                grains["osmajorrelease"]
-            )
+            baseurl = "https://packages.broadcom.com/artifactory/saltproject-rpm/"
+            gpgkey = "https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public"
             gpgcheck = 1
             enabled = 1
             ret = modules.pkg.mod_repo(
diff --git a/tests/pytests/functional/modules/test_service.py b/tests/pytests/functional/modules/test_service.py
index a54ff1fbe6c..8384c9c5b20 100644
--- a/tests/pytests/functional/modules/test_service.py
+++ b/tests/pytests/functional/modules/test_service.py
@@ -1,4 +1,5 @@
 import os
+import subprocess
 
 import pytest
 
@@ -14,6 +15,19 @@ pytestmark = [
 ]
 
 
+def _check_systemctl():
+    if not hasattr(_check_systemctl, "memo"):
+        if not salt.utils.platform.is_linux():
+            _check_systemctl.memo = False
+        else:
+            proc = subprocess.run(["systemctl"], capture_output=True, check=False)
+            _check_systemctl.memo = (
+                b"Failed to get D-Bus connection: No such file or directory"
+                in proc.stderr
+            )
+    return _check_systemctl.memo
+
+
 @pytest.fixture
 def service_name(grains, modules):
     # For local testing purposes
@@ -68,6 +82,7 @@ def setup_service(service_name, modules):
                 modules.service.disable(service_name)
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_status_running(modules, service_name):
     """
     test service.status execution module
@@ -88,6 +103,7 @@ def test_service_status_dead(modules, service_name):
     assert not check_service
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_restart(modules, service_name):
     """
     test service.restart
@@ -95,6 +111,7 @@ def test_service_restart(modules, service_name):
     assert modules.service.stop(service_name)
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_enable(modules, service_name):
     """
     test service.get_enabled and service.enable module
@@ -106,6 +123,7 @@ def test_service_enable(modules, service_name):
     assert service_name in modules.service.get_enabled()
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_disable(modules, service_name):
     """
     test service.get_disabled and service.disable module
diff --git a/tests/pytests/functional/modules/test_system.py b/tests/pytests/functional/modules/test_system.py
index 07f34e8a516..8ffb048fd58 100644
--- a/tests/pytests/functional/modules/test_system.py
+++ b/tests/pytests/functional/modules/test_system.py
@@ -5,10 +5,13 @@
 import signal
 import subprocess
 import textwrap
+import time
 
 import pytest
 
 import salt.utils.files
+from salt.exceptions import CommandExecutionError
 
 pytestmark = [
     pytest.mark.skip_unless_on_linux,
@@ -18,6 +20,21 @@ pytestmark = [
 log = logging.getLogger(__name__)
 
 
+def check_hostnamectl():
+    if not hasattr(check_hostnamectl, "memo"):
+        if not salt.utils.platform.is_linux():
+            check_hostnamectl.memo = False
+        else:
+            proc = subprocess.run(["hostnamectl"], capture_output=True, check=False)
+            check_hostnamectl.memo = (
+                b"Failed to connect to bus: No such file or directory" in proc.stderr
+                or b"Failed to create bus connection: No such file or directory"
+                in proc.stderr
+                or b"Failed to query system properties" in proc.stderr
+            )
+    return check_hostnamectl.memo
+
+
 @pytest.fixture(scope="module")
 def cmdmod(modules):
     return modules.cmd
@@ -50,6 +67,7 @@ def fmt_str():
 
 @pytest.fixture(scope="function")
 def setup_teardown_vars(file, service, system):
+    _systemd_timesyncd_available_ = None
     _orig_time = datetime.datetime.utcnow()
 
     if os.path.isfile("/etc/machine-info"):
@@ -76,7 +94,13 @@ def setup_teardown_vars(file, service, system):
             file.remove("/etc/machine-info")
 
         if _systemd_timesyncd_available_:
-            res = service.start("systemd-timesyncd")
+            try:
+                res = service.start("systemd-timesyncd")
+            except CommandExecutionError:
+                # We possibly did too many restarts in too short time
+                # Wait 10s (default systemd timeout) and try again
+                time.sleep(10)
+                res = service.start("systemd-timesyncd")
             assert res
 
 
@@ -175,6 +199,7 @@ def _test_hwclock_sync(system, hwclock_has_compare):
         log.error("Failed to check hwclock sync")
 
 
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_get_system_date_time(setup_teardown_vars, system, fmt_str):
     """
     Test we are able to get the correct time
@@ -186,6 +211,7 @@ def test_get_system_date_time(setup_teardown_vars, system, fmt_str):
     assert _same_times(t1, t2, seconds_diff=3), msg
 
 
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_get_system_date_time_utc(setup_teardown_vars, system, fmt_str):
     """
     Test we are able to get the correct time with utc
@@ -197,9 +223,9 @@ def test_get_system_date_time_utc(setup_teardown_vars, system, fmt_str):
     assert _same_times(t1, t2, seconds_diff=3), msg
 
 
-@pytest.mark.skip_on_env("ON_DOCKER", eq="1")
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_system_date_time(setup_teardown_vars, system, hwclock_has_compare):
     """
     Test changing the system clock. We are only able to set it up to a
@@ -218,9 +244,9 @@ def test_set_system_date_time(setup_teardown_vars, system, hwclock_has_compare):
     _test_hwclock_sync(system, hwclock_has_compare)
 
 
-@pytest.mark.skip_on_env("ON_DOCKER", eq="1")
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_system_date_time_utc(setup_teardown_vars, system, hwclock_has_compare):
     """
     Test changing the system clock. We are only able to set it up to a
@@ -239,9 +265,9 @@ def test_set_system_date_time_utc(setup_teardown_vars, system, hwclock_has_compa
     _test_hwclock_sync(system, hwclock_has_compare)
 
 
-@pytest.mark.skip_on_env("ON_DOCKER", eq="1")
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_system_date_time_utcoffset_east(
     setup_teardown_vars, system, hwclock_has_compare
 ):
@@ -263,9 +289,9 @@ def test_set_system_date_time_utcoffset_east(
     _test_hwclock_sync(system, hwclock_has_compare)
 
 
-@pytest.mark.skip_on_env("ON_DOCKER", eq="1")
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_system_date_time_utcoffset_west(
     setup_teardown_vars, system, hwclock_has_compare
 ):
@@ -287,10 +313,10 @@ def test_set_system_date_time_utcoffset_west(
     _test_hwclock_sync(system, hwclock_has_compare)
 
 
-@pytest.mark.skip_on_env("ON_DOCKER", eq="1")
 @pytest.mark.flaky(max_runs=4)
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_system_time(setup_teardown_vars, system, hwclock_has_compare):
     """
     Test setting the system time without adjusting the date.
@@ -309,9 +335,9 @@ def test_set_system_time(setup_teardown_vars, system, hwclock_has_compare):
     _test_hwclock_sync(system, hwclock_has_compare)
 
 
-@pytest.mark.skip_on_env("ON_DOCKER", eq="1")
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_system_date(setup_teardown_vars, system, hwclock_has_compare):
     """
     Test setting the system date without adjusting the time.
@@ -332,6 +358,7 @@ def test_set_system_date(setup_teardown_vars, system, hwclock_has_compare):
 
 
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_get_computer_desc(setup_teardown_vars, system, cmdmod):
     """
     Test getting the system hostname
@@ -353,6 +380,7 @@ def test_get_computer_desc(setup_teardown_vars, system, cmdmod):
 
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_computer_desc(setup_teardown_vars, system):
     """
     Test setting the computer description
@@ -367,6 +395,7 @@ def test_set_computer_desc(setup_teardown_vars, system):
 
 @pytest.mark.destructive_test
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_set_computer_desc_multiline(setup_teardown_vars, system):
     """
     Test setting the computer description with a multiline string with tabs
@@ -386,6 +415,7 @@ def test_set_computer_desc_multiline(setup_teardown_vars, system):
 
 
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(check_hostnamectl(), reason="hostnamectl degraded.")
 def test_has_hwclock(setup_teardown_vars, system, grains, hwclock_has_compare):
     """
     Verify platform has a settable hardware clock, if possible.
diff --git a/tests/pytests/functional/modules/test_win_pkg.py b/tests/pytests/functional/modules/test_win_pkg.py
index b68895ef625..6bcfaa9bd84 100644
--- a/tests/pytests/functional/modules/test_win_pkg.py
+++ b/tests/pytests/functional/modules/test_win_pkg.py
@@ -29,7 +29,7 @@ def pkg(modules):
 
 def test_refresh_db(pkg, pkg_def_contents, state_tree, minion_opts):
     assert len(pkg.get_package_info("my-software")) == 0
-    repo_dir = state_tree / "win" / "repo-ng"
+    repo_dir = state_tree / "winrepo_ng"
     with pytest.helpers.temp_file("my-software.sls", pkg_def_contents, repo_dir):
         pkg.refresh_db()
     assert len(pkg.get_package_info("my-software")) == 1
diff --git a/tests/pytests/functional/modules/test_win_useradd.py b/tests/pytests/functional/modules/test_win_useradd.py
index 5e33ce36bd4..781ec2d776c 100644
--- a/tests/pytests/functional/modules/test_win_useradd.py
+++ b/tests/pytests/functional/modules/test_win_useradd.py
@@ -223,9 +223,9 @@ def test_list_groups_int(user, account_int):
     assert ret == ["Users"]
 
 
-def test_list_users(user):
+def test_list_users(user, account_str):
     ret = user.list_users()
-    assert "Administrator" in ret
+    assert account_str.username in ret
 
 
 def test_removegroup_str(user, account_str):
diff --git a/tests/pytests/functional/modules/win_file/test_check_perms.py b/tests/pytests/functional/modules/win_file/test_check_perms.py
index f2196185904..34a795124e0 100644
--- a/tests/pytests/functional/modules/win_file/test_check_perms.py
+++ b/tests/pytests/functional/modules/win_file/test_check_perms.py
@@ -3,8 +3,11 @@ Tests for win_file execution module
 """
 
 import pytest
+from saltfactories.utils import random_string
 
+import salt.modules.cmdmod as cmd
 import salt.modules.win_file as win_file
+import salt.modules.win_useradd as win_useradd
 import salt.utils.win_dacl as win_dacl
 from salt.exceptions import CommandExecutionError
 from tests.support.mock import patch
@@ -25,9 +28,22 @@ def configure_loader_modules():
             },
             "__opts__": {"test": False},
         },
+        win_useradd: {
+            "__salt__": {
+                "cmd.run_all": cmd.run_all,
+            },
+        },
     }
 
 
+@pytest.fixture
+def temp_account():
+    user_name = random_string("test-account-", uppercase=False)
+    with pytest.helpers.create_account(username=user_name) as account:
+        win_useradd.addgroup(account.username, "Users")
+        yield account.username
+
+
 @pytest.fixture(scope="function")
 def test_file():
     with pytest.helpers.temp_file("win_file_test.file") as test_file:
@@ -184,7 +200,7 @@ def test_check_perms_inheritance_true(test_file):
     assert result == expected
 
 
-def test_check_perms_reset_test_true(test_file):
+def test_check_perms_reset_test_true(test_file, temp_account):
     """
     Test resetting perms with test=True. This shows minimal changes
     """
@@ -193,7 +209,7 @@ def test_check_perms_reset_test_true(test_file):
     # Set some permissions
     win_dacl.set_permissions(
         obj_name=str(test_file),
-        principal="Administrator",
+        principal=temp_account,
         permissions="full_control",
     )
     expected = {
@@ -204,7 +220,7 @@ def test_check_perms_reset_test_true(test_file):
                 "Users": {"permissions": "read_execute"},
             },
             "remove_perms": {
-                "Administrator": {
+                f"{temp_account}": {
                     "grant": {
                         "applies to": "This folder only",
                         "permissions": "Full control",
@@ -228,7 +244,7 @@ def test_check_perms_reset_test_true(test_file):
         assert result == expected
 
 
-def test_check_perms_reset(test_file):
+def test_check_perms_reset(test_file, temp_account):
     """
     Test resetting perms on a File
     """
@@ -237,7 +253,7 @@ def test_check_perms_reset(test_file):
     # Set some permissions
     win_dacl.set_permissions(
         obj_name=str(test_file),
-        principal="Administrator",
+        principal=temp_account,
         permissions="full_control",
     )
     expected = {
@@ -248,7 +264,7 @@ def test_check_perms_reset(test_file):
                 "Users": {"permissions": "read_execute"},
             },
             "remove_perms": {
-                "Administrator": {
+                f"{temp_account}": {
                     "grant": {
                         "applies to": "This folder only",
                         "permissions": "Full control",
diff --git a/tests/pytests/functional/pillar/test_git_pillar.py b/tests/pytests/functional/pillar/test_git_pillar.py
index 6fd3dee431b..7bbfea3bcf1 100644
--- a/tests/pytests/functional/pillar/test_git_pillar.py
+++ b/tests/pytests/functional/pillar/test_git_pillar.py
@@ -2,6 +2,7 @@ import pytest
 
 from salt.pillar.git_pillar import ext_pillar
 from salt.utils.immutabletypes import ImmutableDict, ImmutableList
+from salt.utils.odict import OrderedDict
 from tests.support.mock import patch
 
 pytestmark = [
@@ -260,3 +261,38 @@ def test_gitpython_multiple_2(gitpython_pillar_opts, grains):
 @skipif_no_pygit2
 def test_pygit2_multiple_2(pygit2_pillar_opts, grains):
     _test_multiple_2(pygit2_pillar_opts, grains)
+
+
+def _test_multiple_slash_in_branch_name(pillar_opts, grains):
+    pillar_opts["pillarenv"] = "doggy/moggy"
+    data = _get_ext_pillar(
+        "minion",
+        pillar_opts,
+        grains,
+        "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
+    )
+    assert data == {
+        "key": "data",
+        "foo": OrderedDict(
+            [
+                ("animals", OrderedDict([("breed", "seadog")])),
+                (
+                    "feature/baz",
+                    OrderedDict(
+                        [("test1", "dog"), ("test2", "kat"), ("test3", "gerbil")]
+                    ),
+                ),
+            ]
+        ),
+    }
+
+
+@skipif_no_gitpython
+def test_gitpython_multiple_slash_in_branch_name(gitpython_pillar_opts, grains):
+    _test_multiple_slash_in_branch_name(gitpython_pillar_opts, grains)
+
+
+@skipif_no_pygit2
+def test_pygit2_multiple_slash_in_branch_name(pygit2_pillar_opts, grains):
+    _test_multiple_slash_in_branch_name(pygit2_pillar_opts, grains)
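+
+
+# For reference, a rough sketch of the _get_ext_pillar() helper used above;
+# the real helper is defined earlier in this module. Wiring in the opts and
+# grains by patching the module dunders is an assumption about how that
+# helper works (patch and ext_pillar are already imported above).
+def _get_ext_pillar(minion, pillar_opts, grains, *repos):
+    with patch("salt.pillar.git_pillar.__opts__", pillar_opts, create=True):
+        with patch("salt.pillar.git_pillar.__grains__", grains, create=True):
+            return ext_pillar(minion, {}, *repos)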
diff --git a/tests/pytests/functional/runners/test_winrepo.py b/tests/pytests/functional/runners/test_winrepo.py
index f744c2a98dd..c1dbf7ff302 100644
--- a/tests/pytests/functional/runners/test_winrepo.py
+++ b/tests/pytests/functional/runners/test_winrepo.py
@@ -24,6 +24,7 @@ def configure_loader_modules(minion_opts, tmp_path):
 @pytest.fixture
 def winrepo_remotes(minion_opts):
     remotes = set()
+    # Legacy repo has been archived as of September 2024
     remotes.update(minion_opts.get("winrepo_remotes", []))
     remotes.update(minion_opts.get("winrepo_remotes_ng", []))
     return remotes
diff --git a/tests/pytests/functional/states/chocolatey/test_post_20.py b/tests/pytests/functional/states/chocolatey/test_post_20.py
index e6c5d25bc9e..5e94d488349 100644
--- a/tests/pytests/functional/states/chocolatey/test_post_20.py
+++ b/tests/pytests/functional/states/chocolatey/test_post_20.py
@@ -114,10 +114,10 @@ def vim(chocolatey_mod):
 
 
 @pytest.fixture(scope="function")
-def everything(chocolatey_mod):
-    chocolatey_mod.install(name="everything", version="1.4.1935")
+def sudo(chocolatey_mod):
+    chocolatey_mod.install(name="sudo", version="1.1.2")
     yield
-    chocolatey_mod.uninstall(name="everything", force=True)
+    chocolatey_mod.uninstall(name="sudo", force=True)
 
 
 def test_installed_latest(clean, chocolatey, chocolatey_mod):
@@ -133,11 +133,10 @@ def test_installed_version(clean, chocolatey, chocolatey_mod):
     assert result["vim"]["installed"][0] == "9.0.1672"
 
 
-def test_installed_version_existing_capitalization(
-    everything, chocolatey, chocolatey_mod
-):
-    result = chocolatey.installed(name="everything", version="1.4.11024")
-    expected_changes = {"Everything": {"new": ["1.4.11024"], "old": ["1.4.1935"]}}
+def test_installed_version_existing_capitalization(sudo, chocolatey, chocolatey_mod):
+    result = chocolatey.installed(name="sudo", version="1.1.3")
+    expected_changes = {"Sudo": {"new": ["1.1.3"], "old": ["1.1.2"]}}
     assert result["changes"] == expected_changes
 
 
diff --git a/tests/pytests/functional/states/chocolatey/test_pre_20.py b/tests/pytests/functional/states/chocolatey/test_pre_20.py
index e951c1d180b..cfefe13f139 100644
--- a/tests/pytests/functional/states/chocolatey/test_pre_20.py
+++ b/tests/pytests/functional/states/chocolatey/test_pre_20.py
@@ -110,10 +110,10 @@ def vim(chocolatey_mod):
 
 
 @pytest.fixture(scope="function")
-def everything(chocolatey_mod):
-    chocolatey_mod.install(name="everything", version="1.4.1935")
+def sudo(chocolatey_mod):
+    chocolatey_mod.install(name="sudo", version="1.1.2")
     yield
-    chocolatey_mod.uninstall(name="everything", force=True)
+    chocolatey_mod.uninstall(name="sudo", force=True)
 
 
 def test_installed_latest(clean, chocolatey, chocolatey_mod):
@@ -129,11 +129,10 @@ def test_installed_version(clean, chocolatey, chocolatey_mod):
     assert result["vim"]["installed"][0] == "9.0.1672"
 
 
-def test_installed_version_existing_capitalization(
-    everything, chocolatey, chocolatey_mod
-):
-    result = chocolatey.installed(name="everything", version="1.4.11024")
-    expected_changes = {"Everything": {"new": ["1.4.11024"], "old": ["1.4.1935"]}}
+def test_installed_version_existing_capitalization(sudo, chocolatey, chocolatey_mod):
+    result = chocolatey.installed(name="sudo", version="1.1.3")
+    expected_changes = {"Sudo": {"new": ["1.1.3"], "old": ["1.1.2"]}}
     assert result["changes"] == expected_changes
 
 
diff --git a/tests/pytests/functional/states/cmd/test_cmd_run.py b/tests/pytests/functional/states/cmd/test_cmd_run.py
new file mode 100644
index 00000000000..22a5ba65698
--- /dev/null
+++ b/tests/pytests/functional/states/cmd/test_cmd_run.py
@@ -0,0 +1,64 @@
+import os
+
+import pytest
+
+import salt.utils.path
+
+pytestmark = [
+    pytest.mark.windows_whitelisted,
+    pytest.mark.skip_unless_on_windows,
+    pytest.mark.destructive_test,
+    pytest.mark.slow_test,
+]
+
+
+@pytest.fixture(params=["powershell", "pwsh"])
+def shell(request):
+    """
+    This will run the test on PowerShell and PowerShell Core (pwsh). If
+    PowerShell Core is not installed, the test run will be skipped.
+    """
+
+    if request.param == "pwsh" and salt.utils.path.which("pwsh") is None:
+        pytest.skip("Powershell 7 Not Present")
+    return request.param
+
+
+def test_cmd_run_unless_true(shell, cmd):
+    # We need a directory that we know exists and has contents
+    win_dir = os.getenv("WINDIR")
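+    # "ls" works in both shells because PowerShell aliases it to
+    # Get-ChildItem, which exits non-zero when the path does not exist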
+    ret = cmd.run(name="echo foo", unless=f"ls {win_dir}", shell=shell)
+    assert ret.filtered["result"] is True
+    assert ret.filtered["name"] == "echo foo"
+    assert ret.filtered["comment"] == "unless condition is true"
+    assert ret.filtered["changes"] == {}
+
+
+def test_cmd_run_unless_false(shell, cmd):
+    # We need a directory that we know does not exist
+    win_dir = "C:\\This\\Dir\\Does\\Not\\Exist"
+    ret = cmd.run(name="echo foo", unless=f"ls {win_dir}", shell=shell)
+    assert ret.filtered["result"] is True
+    assert ret.filtered["name"] == "echo foo"
+    assert ret.filtered["comment"] == 'Command "echo foo" run'
+    assert ret.filtered["changes"]["stdout"] == "foo"
+
+
+def test_cmd_run_onlyif_true(shell, cmd):
+    # We need a directory that we know exists and has contents
+    win_dir = os.getenv("WINDIR")
+    ret = cmd.run(name="echo foo", onlyif=f"ls {win_dir}", shell=shell)
+    assert ret.filtered["result"] is True
+    assert ret.filtered["name"] == "echo foo"
+    assert ret.filtered["comment"] == 'Command "echo foo" run'
+    assert ret.filtered["changes"]["stdout"] == "foo"
+
+
+def test_cmd_run_onlyif_false(shell, cmd):
+    # We need a directory that we know does not exist
+    win_dir = "C:\\This\\Dir\\Does\\Not\\Exist"
+    ret = cmd.run(name="echo foo", onlyif=f"ls {win_dir}", shell=shell)
+    assert ret.filtered["result"] is True
+    assert ret.filtered["name"] == "echo foo"
+    assert ret.filtered["comment"] == "onlyif condition is false"
+    assert ret.filtered["changes"] == {}
diff --git a/tests/pytests/functional/states/file/conftest.py b/tests/pytests/functional/states/file/conftest.py
index 9d5022aa0f8..c48658d9e17 100644
--- a/tests/pytests/functional/states/file/conftest.py
+++ b/tests/pytests/functional/states/file/conftest.py
@@ -39,7 +39,13 @@ def holy(state_tree_prod):
 
 @pytest.fixture
 def grail_scene33_file(grail):
-    return grail / "scene33"
+    signed_file = grail / "scene33"
+    hash_file = signed_file.with_suffix(".SHA256")
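+    # Normalize to LF line endings so the file bytes match the recorded
+    # SHA256 hash and signature (assumption: a CRLF checkout, e.g. with git
+    # autocrlf on Windows, would otherwise alter the hashed content)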
+    for test_file in (signed_file, hash_file):
+        content = test_file.read_bytes()
+        content = content.replace(b"\r\n", b"\n")
+        test_file.write_bytes(content)
+    return signed_file
 
 
 @pytest.fixture
diff --git a/tests/pytests/functional/states/file/test__check_directory_win.py b/tests/pytests/functional/states/file/test__check_directory_win.py
index 6568be82304..bde28fab064 100644
--- a/tests/pytests/functional/states/file/test__check_directory_win.py
+++ b/tests/pytests/functional/states/file/test__check_directory_win.py
@@ -17,15 +17,74 @@ def configure_loader_modules():
     }
 
 
-def test__check_directory_win_owner(tmp_path):
-    path = str(tmp_path)
+@pytest.fixture
+def temp_path(tmp_path):
+
+    # Ownership is not inherited but permissions are, so we shouldn't have to
+    # set ownership. Ownership is determined by the user creating the directory.
+    # An administrator account will set the owner as the Administrators group.
+    # A non-administrator account will set the user itself as the owner.
+
+    # Create a directory and set the permissions to make sure they're the only
+    # ones (reset_perms=True) and not inherited (protected=True)
+    tmp_path.mkdir(parents=True, exist_ok=True)
+    win_dacl.set_permissions(
+        obj_name=str(tmp_path),
+        principal="Administrators",
+        permissions="full_control",
+        access_mode="grant",
+        reset_perms=True,
+        protected=True,
+    )
+    perms = {
+        "Inherited": {},
+        "Not Inherited": {
+            "Administrators": {
+                "grant": {
+                    "applies to": "This folder, subfolders and files",
+                    "permissions": "Full control",
+                }
+            }
+        },
+    }
+    # Verify perms and inheritance
+    assert win_dacl.get_permissions(obj_name=str(tmp_path)) == perms
+    assert not win_dacl.get_inheritance(obj_name=str(tmp_path))
+
+    # Now we create a test directory that does inherit those permissions
+    # from the new parent directory above
+    test_dir = tmp_path / "test_dir"
+    test_dir.mkdir()
+
+    # We want to make sure inheritance is enabled
+    assert win_dacl.get_inheritance(obj_name=str(test_dir))
+
+    # We want to make sure the test directory inherited permissions from the
+    # parent directory
+    perms = {
+        "Inherited": {
+            "Administrators": {
+                "grant": {
+                    "applies to": "This folder, subfolders and files",
+                    "permissions": "Full control",
+                }
+            }
+        },
+        "Not Inherited": {},
+    }
+    assert win_dacl.get_permissions(obj_name=str(test_dir)) == perms
+    yield test_dir
+
+
+def test__check_directory_win_owner(temp_path):
+    path = str(temp_path)
     _, comment, changes = file._check_directory_win(name=path, win_owner="Everyone")
     assert path in comment
     assert changes == {"owner": "Everyone"}
 
 
-def test__check_directory_win_grant_perms_basic(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_grant_perms_basic(temp_path):
+    path = str(temp_path)
     perms = {
         "Guest": {
             "applies_to": "this_folder_subfolders_files",
@@ -45,8 +104,8 @@ def test__check_directory_win_grant_perms_basic(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_grant_perms_basic_existing_user(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_grant_perms_basic_existing_user(temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
@@ -60,8 +119,8 @@ def test__check_directory_win_grant_perms_basic_existing_user(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_grant_perms_advanced(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_grant_perms_advanced(temp_path):
+    path = str(temp_path)
     perms = {
         "Guest": {
             "applies_to": "this_folder_subfolders_files",
@@ -81,8 +140,8 @@ def test__check_directory_win_grant_perms_advanced(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_grant_perms_advanced_existing_user(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_grant_perms_advanced_existing_user(temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
@@ -105,8 +164,8 @@ def test__check_directory_win_grant_perms_advanced_existing_user(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_grant_perms_basic_no_applies_to(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_grant_perms_basic_no_applies_to(temp_path):
+    path = str(temp_path)
     perms = {"Guest": {"perms": "full_control"}}
     expected = {"grant_perms": {"Guest": {"permissions": "full_control"}}}
     _, comment, changes = file._check_directory_win(name=path, win_perms=perms)
@@ -114,8 +173,8 @@ def test__check_directory_win_grant_perms_basic_no_applies_to(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_deny_perms_basic(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_deny_perms_basic(temp_path):
+    path = str(temp_path)
     perms = {
         "Guest": {
             "applies_to": "this_folder_subfolders_files",
@@ -135,8 +194,8 @@ def test__check_directory_win_deny_perms_basic(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_deny_perms_basic_existing_user(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_deny_perms_basic_existing_user(temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
@@ -150,8 +209,8 @@ def test__check_directory_win_deny_perms_basic_existing_user(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_deny_perms_advanced(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_deny_perms_advanced(temp_path):
+    path = str(temp_path)
     perms = {
         "Guest": {
             "applies_to": "this_folder_subfolders_files",
@@ -171,8 +230,8 @@ def test__check_directory_win_deny_perms_advanced(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_deny_perms_advanced_existing_user(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_deny_perms_advanced_existing_user(temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
@@ -195,8 +254,8 @@ def test__check_directory_win_deny_perms_advanced_existing_user(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_deny_perms_basic_no_applies_to(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_deny_perms_basic_no_applies_to(temp_path):
+    path = str(temp_path)
     perms = {"Guest": {"perms": "full_control"}}
     expected = {"deny_perms": {"Guest": {"permissions": "full_control"}}}
     _, comment, changes = file._check_directory_win(name=path, win_deny_perms=perms)
@@ -204,32 +263,32 @@ def test__check_directory_win_deny_perms_basic_no_applies_to(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_win_inheritance(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_inheritance(temp_path):
+    path = str(temp_path)
     expected = {}
     _, comment, changes = file._check_directory_win(name=path, win_inheritance=True)
     assert path in comment
     assert changes == expected
 
 
-def test__check_directory_win_inheritance_false(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_win_inheritance_false(temp_path):
+    path = str(temp_path)
     expected = {"inheritance": False}
     _, comment, changes = file._check_directory_win(name=path, win_inheritance=False)
     assert path in comment
     assert changes == expected
 
 
-def test__check_directory_reset_no_non_inherited_users(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_reset_no_non_inherited_users(temp_path):
+    path = str(temp_path)
     expected = {}
     _, comment, changes = file._check_directory_win(name=path, win_perms_reset=True)
     assert path in comment
     assert changes == expected
 
 
-def test__check_directory_reset_non_inherited_users_grant(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_reset_non_inherited_users_grant(temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
@@ -252,8 +311,8 @@ def test__check_directory_reset_non_inherited_users_grant(tmp_path):
     assert changes == expected
 
 
-def test__check_directory_reset_non_inherited_users_deny(tmp_path):
-    path = str(tmp_path)
+def test__check_directory_reset_non_inherited_users_deny(temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py
index 19cb3f94876..328c3412b70 100644
--- a/tests/pytests/functional/states/file/test_directory.py
+++ b/tests/pytests/functional/states/file/test_directory.py
@@ -34,11 +34,6 @@ def test_directory_symlink_dry_run(file, tmp_path):
     Ensure that symlinks are followed when file.directory is run with
     test=True
     """
-    if IS_WINDOWS and not os.environ.get("GITHUB_ACTIONS_PIPELINE"):
-        pytest.xfail(
-            "This test fails when running from Jenkins but not on the GitHub "
-            "Actions Pipeline"
-        )
     tmp_dir = tmp_path / "pgdata"
     sym_dir = tmp_path / "pg_data"
 
@@ -57,7 +52,22 @@ def test_directory_symlink_dry_run(file, tmp_path):
     ret = file.directory(
         test=True, name=str(sym_dir), follow_symlinks=True, **extra_kwds
     )
-    assert ret.result is True
+
+    expected = True
+
+    if IS_WINDOWS:
+        # On Windows the result is None because there would have been changes
+        # made to the directory (making Administrator the Owner)
+        # https://docs.saltproject.io/en/latest/ref/states/writing.html#return-data
+        expected = None
+
+    assert ret.result is expected
+
+
+def _kernel_check(lookfor):
+    with salt.utils.files.fopen("/proc/version") as fp:
+        versioninfo = fp.read().lower()
+    return lookfor in versioninfo
 
 
 @pytest.mark.skip_if_not_root
@@ -83,7 +93,10 @@ def test_directory_max_depth(file, tmp_path):
     initial_mode = "0111"
     changed_mode = "0555"
 
-    if salt.utils.platform.is_photonos():
+    # Check that we are not just running Photon but that the kernel matches.
+    # This check should fail if we are in a Photon container running on an OS
+    # other than Photon.
+    if salt.utils.platform.is_photonos() and _kernel_check("photon"):
         initial_modes = {
             0: {sub: "0750", subsub: "0110"},
             1: {sub: "0110", subsub: "0110"},
diff --git a/tests/pytests/functional/states/file/test_directory_win.py b/tests/pytests/functional/states/file/test_directory_win.py
index 685f48195c3..e3575f8f3b5 100644
--- a/tests/pytests/functional/states/file/test_directory_win.py
+++ b/tests/pytests/functional/states/file/test_directory_win.py
@@ -1,15 +1,6 @@
-import os
-
 import pytest
 
 import salt.utils.win_dacl as win_dacl
-import salt.utils.win_functions as win_functions
-
-try:
-    CURRENT_USER = win_functions.get_current_user(with_domain=False)
-except NameError:
-    # Not a Windows Machine
-    pass
 
 pytestmark = [
     pytest.mark.windows_whitelisted,
@@ -18,12 +9,64 @@ pytestmark = [
 ]
 
 
-def test_directory_new(file, tmp_path):
+@pytest.fixture
+def temp_path(tmp_path):
+    # We need to create a directory that doesn't inherit permissions from the test suite
+    tmp_path.mkdir(parents=True, exist_ok=True)
+    win_dacl.set_owner(obj_name=str(tmp_path), principal="Administrators")
+    assert win_dacl.get_owner(obj_name=str(tmp_path)) == "Administrators"
+    # We don't want the parent test directory to inherit permissions
+    win_dacl.set_inheritance(obj_name=str(tmp_path), enabled=False)
+    assert not win_dacl.get_inheritance(obj_name=str(tmp_path))
+    # Set these permissions and make sure they're the only ones
+    win_dacl.set_permissions(
+        obj_name=str(tmp_path),
+        principal="Administrators",
+        permissions="full_control",
+        access_mode="grant",
+        reset_perms=True,
+        protected=True,
+    )
+    perms = {
+        "Inherited": {},
+        "Not Inherited": {
+            "Administrators": {
+                "grant": {
+                    "applies to": "This folder, subfolders and files",
+                    "permissions": "Full control",
+                }
+            }
+        },
+    }
+    assert win_dacl.get_permissions(obj_name=str(tmp_path)) == perms
+
+    # Now we create a test directory that does inherit those permissions from the new parent directory above
+    test_dir = tmp_path / "test_dir"
+    test_dir.mkdir()
+    # We do want the test directory to inherit permissions from the parent directory
+    assert win_dacl.get_inheritance(obj_name=str(test_dir))
+    # Make sure the permissions are inherited from the parent
+    perms = {
+        "Inherited": {
+            "Administrators": {
+                "grant": {
+                    "applies to": "This folder, subfolders and files",
+                    "permissions": "Full control",
+                }
+            }
+        },
+        "Not Inherited": {},
+    }
+    assert win_dacl.get_permissions(obj_name=str(test_dir)) == perms
+    yield test_dir
+
+
+def test_directory_new(file, temp_path):
     """
     Test file.directory when the directory does not exist
     Should just return "New Dir"
     """
-    path = os.path.join(tmp_path, "test")
+    path = str(temp_path / "test")
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -41,18 +84,6 @@ def test_directory_new(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
         },
         "Not Inherited": {
             "Administrators": {
@@ -61,18 +92,6 @@ def test_directory_new(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
             "Guest": {
                 "deny": {
                     "applies to": "This folder, subfolders and files",
@@ -84,12 +103,12 @@ def test_directory_new(file, tmp_path):
     assert permissions == expected
 
 
-def test_directory_new_no_inherit(file, tmp_path):
+def test_directory_new_no_inherit(file, temp_path):
     """
     Test file.directory when the directory does not exist
     Should just return "New Dir"
     """
-    path = os.path.join(tmp_path, "test")
+    path = str(temp_path / "test")
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -104,12 +123,12 @@ def test_directory_new_no_inherit(file, tmp_path):
     assert permissions["Inherited"] == {}
 
 
-def test_directory_new_reset(file, tmp_path):
+def test_directory_new_reset(file, temp_path):
     """
     Test file.directory when the directory does not exist
     Should just return "New Dir"
     """
-    path = os.path.join(tmp_path, "test")
+    path = str(temp_path / "test")
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -128,18 +147,6 @@ def test_directory_new_reset(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
         },
         "Not Inherited": {
             "Administrators": {
@@ -159,12 +166,12 @@ def test_directory_new_reset(file, tmp_path):
     assert permissions == expected
 
 
-def test_directory_new_reset_no_inherit(file, tmp_path):
+def test_directory_new_reset_no_inherit(file, temp_path):
     """
     Test file.directory when the directory does not exist
     Should just return "New Dir"
     """
-    path = os.path.join(tmp_path, "test")
+    path = str(temp_path / "test")
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -196,8 +203,8 @@ def test_directory_new_reset_no_inherit(file, tmp_path):
     assert permissions == expected
 
 
-def test_directory_existing(file, tmp_path):
-    path = str(tmp_path)
+def test_directory_existing(file, temp_path):
+    path = str(temp_path)
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -208,10 +215,9 @@ def test_directory_existing(file, tmp_path):
         "deny_perms": {"Guest": {"permissions": ["write_data", "write_attributes"]}},
         "grant_perms": {"Everyone": {"permissions": "full_control"}},
     }
-    # We are checking these individually because sometimes it will return an
-    # owner if it is running under the Administrator account
-    assert ret["changes"]["deny_perms"] == expected["deny_perms"]
-    assert ret["changes"]["grant_perms"] == expected["grant_perms"]
+    # Sometimes an owner will be set; we don't care about the owner here
+    ret["changes"].pop("owner", None)
+    assert ret["changes"] == expected
     permissions = win_dacl.get_permissions(path)
     expected = {
         "Inherited": {
@@ -221,18 +227,6 @@ def test_directory_existing(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
         },
         "Not Inherited": {
             "Administrators": {
@@ -241,18 +235,6 @@ def test_directory_existing(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
             "Everyone": {
                 "grant": {
                     "applies to": "This folder, subfolders and files",
@@ -270,8 +252,8 @@ def test_directory_existing(file, tmp_path):
     assert permissions == expected
 
 
-def test_directory_existing_existing_user(file, tmp_path):
-    path = str(tmp_path)
+def test_directory_existing_existing_user(file, temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Everyone",
@@ -289,10 +271,9 @@ def test_directory_existing_existing_user(file, tmp_path):
         "deny_perms": {"Guest": {"permissions": ["write_data", "write_attributes"]}},
         "grant_perms": {"Everyone": {"permissions": "full_control"}},
     }
-    # We are checking these individually because sometimes it will return an
-    # owner if it is running under the Administrator account
-    assert ret["changes"]["deny_perms"] == expected["deny_perms"]
-    assert ret["changes"]["grant_perms"] == expected["grant_perms"]
+    # Sometimes an owner will be set; we don't care about the owner here
+    ret["changes"].pop("owner", None)
+    assert ret["changes"] == expected
     permissions = win_dacl.get_permissions(path)
     expected = {
         "Inherited": {
@@ -302,18 +283,6 @@ def test_directory_existing_existing_user(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
         },
         "Not Inherited": {
             "Administrators": {
@@ -322,18 +291,6 @@ def test_directory_existing_existing_user(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
             "Everyone": {
                 "grant": {
                     "applies to": "This folder, subfolders and files",
@@ -351,8 +308,8 @@ def test_directory_existing_existing_user(file, tmp_path):
     assert permissions == expected
 
 
-def test_directory_existing_no_inherit(file, tmp_path):
-    path = str(tmp_path)
+def test_directory_existing_no_inherit(file, temp_path):
+    path = str(temp_path)
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -365,18 +322,16 @@ def test_directory_existing_no_inherit(file, tmp_path):
         "grant_perms": {"Everyone": {"permissions": "full_control"}},
         "inheritance": False,
     }
-    # We are checking these individually because sometimes it will return an
-    # owner if it is running under the Administrator account
-    assert ret["changes"]["deny_perms"] == expected["deny_perms"]
-    assert ret["changes"]["grant_perms"] == expected["grant_perms"]
-    assert ret["changes"]["inheritance"] == expected["inheritance"]
+    # Sometimes an owner will be set; we don't care about the owner here
+    ret["changes"].pop("owner", None)
+    assert ret["changes"] == expected
     assert not win_dacl.get_inheritance(path)
     permissions = win_dacl.get_permissions(path)
     assert permissions["Inherited"] == {}
 
 
-def test_directory_existing_reset(file, tmp_path):
-    path = str(tmp_path)
+def test_directory_existing_reset(file, temp_path):
+    path = str(temp_path)
     win_dacl.set_permissions(
         obj_name=path,
         principal="Guest",
@@ -401,10 +356,9 @@ def test_directory_existing_reset(file, tmp_path):
             }
         },
     }
-    # We are checking these individually because sometimes it will return an
-    # owner if it is running under the Administrator account
-    assert ret["changes"]["grant_perms"] == expected["grant_perms"]
-    assert ret["changes"]["remove_perms"] == expected["remove_perms"]
+    # Sometimes an owner will be set; we don't care about the owner here
+    ret["changes"].pop("owner", None)
+    assert ret["changes"] == expected
     permissions = win_dacl.get_permissions(path)
     expected = {
         "Inherited": {
@@ -414,18 +368,6 @@ def test_directory_existing_reset(file, tmp_path):
                     "permissions": "Full control",
                 }
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                }
-            },
         },
         "Not Inherited": {
             "Everyone": {
@@ -439,8 +381,8 @@ def test_directory_existing_reset(file, tmp_path):
     assert permissions == expected
 
 
-def test_directory_existing_reset_no_inherit(file, tmp_path):
-    path = str(tmp_path)
+def test_directory_existing_reset_no_inherit(file, temp_path):
+    path = str(temp_path)
     ret = file.directory(
         name=path,
         makedirs=True,
@@ -461,26 +403,12 @@ def test_directory_existing_reset_no_inherit(file, tmp_path):
                     "permissions": "Full control",
                 },
             },
-            "SYSTEM": {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                },
-            },
-            CURRENT_USER: {
-                "grant": {
-                    "applies to": "This folder, subfolders and files",
-                    "permissions": "Full control",
-                },
-            },
         },
     }
-    # We are checking these individually because sometimes it will return an
-    # owner if it is running under the Administrator account
-    assert ret["changes"]["deny_perms"] == expected["deny_perms"]
-    assert ret["changes"]["grant_perms"] == expected["grant_perms"]
-    assert ret["changes"]["inheritance"] == expected["inheritance"]
-    assert ret["changes"]["remove_perms"] == expected["remove_perms"]
+    # Sometimes an owner will be set; we don't care about the owner here
+    ret["changes"].pop("owner", None)
+    assert ret["changes"] == expected
+
     permissions = win_dacl.get_permissions(path)
     expected = {
         "Inherited": {},
diff --git a/tests/pytests/functional/states/file/test_recurse.py b/tests/pytests/functional/states/file/test_recurse.py
index c735d5128da..9b69bbf5fff 100644
--- a/tests/pytests/functional/states/file/test_recurse.py
+++ b/tests/pytests/functional/states/file/test_recurse.py
@@ -7,6 +7,60 @@ pytestmark = [
 ]
 
 
+@pytest.fixture(scope="module")
+def symlink_scenario_1(state_tree):
+    # Create directory structure
+    dir_name = "symlink_scenario_1"
+    source_dir = state_tree / dir_name
+    if not source_dir.is_dir():
+        source_dir.mkdir()
+    source_file = source_dir / "source_file.txt"
+    source_file.write_text("This is the source file...")
+    symlink_file = source_dir / "symlink"
+    symlink_file.symlink_to(source_file)
+    yield dir_name
+
+
+@pytest.fixture(scope="module")
+def symlink_scenario_2(state_tree):
+    # Create directory structure
+    dir_name = "symlink_scenario_2"
+    source_dir = state_tree / dir_name / "test"
+    if not source_dir.is_dir():
+        source_dir.mkdir(parents=True)
+    test1 = source_dir / "test1"
+    test2 = source_dir / "test2"
+    test3 = source_dir / "test3"
+    test_link = source_dir / "test"
+    test1.touch()
+    test2.touch()
+    test3.touch()
+    test_link.symlink_to(test3)
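+    # Note: the symlink is named "test", the same as its parent directory,
+    # which is the naming collision exercised by issue 62117 below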
+    yield dir_name
+
+
+@pytest.fixture(scope="module")
+def symlink_scenario_3(state_tree):
+    # Create directory structure
+    dir_name = "symlink_scenario_3"
+    source_dir = state_tree / dir_name
+    if not source_dir.is_dir():
+        source_dir.mkdir(parents=True)
+    # Create a file with the same name but is not a symlink
+    source_file = source_dir / "not_a_symlink" / "symlink"
+    source_file.parent.mkdir(parents=True)
+    source_file.write_text("This is the source file...")
+    # Create other fluff files
+    just_a_file = source_dir / "just_a_file.txt"
+    just_a_file.touch()
+    dummy_file = source_dir / "notasymlink"
+    dummy_file.touch()
+    # Create symlink to source with the same name
+    symlink_file = source_dir / "symlink"
+    symlink_file.symlink_to(source_file)
+    yield dir_name
+
+
 @pytest.mark.parametrize("test", (False, True))
 def test_recurse(file, tmp_path, grail, test):
     """
@@ -249,3 +303,148 @@ def test_issue_2726_mode_kwarg(modules, tmp_path, state_tree):
         ret = modules.state.template_str("\n".join(good_template))
         for state_run in ret:
             assert state_run.result is True
+
+
+def test_issue_64630_keep_symlinks_true(file, symlink_scenario_1, tmp_path):
+    """
+    Make sure that symlinks are created and that there isn't an error when there
+    are no conflicting target files
+    """
+    target_dir = tmp_path / symlink_scenario_1  # Target for the file.recurse state
+    target_file = target_dir / "source_file.txt"
+    target_symlink = target_dir / "symlink"
+
+    ret = file.recurse(
+        name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=True
+    )
+    assert ret.result is True
+
+    assert target_dir.exists()
+    assert target_file.is_file()
+    assert target_symlink.is_symlink()
+
+
+def test_issue_64630_keep_symlinks_false(file, symlink_scenario_1, tmp_path):
+    """
+    Make sure that symlinks are created as files and that there isn't an error
+    """
+    target_dir = tmp_path / symlink_scenario_1  # Target for the file.recurse state
+    target_file = target_dir / "source_file.txt"
+    target_symlink = target_dir / "symlink"
+
+    ret = file.recurse(
+        name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=False
+    )
+    assert ret.result is True
+
+    assert target_dir.exists()
+    assert target_file.is_file()
+    assert target_symlink.is_file()
+    assert target_file.read_text() == target_symlink.read_text()
+
+
+def test_issue_64630_keep_symlinks_conflicting_force_symlinks_false(
+    file, symlink_scenario_1, tmp_path
+):
+    """
+    Make sure that symlinks are not created when there is a conflict. The state
+    should return False
+    """
+    target_dir = tmp_path / symlink_scenario_1  # Target for the file.recurse state
+    target_file = target_dir / "source_file.txt"
+    target_symlink = target_dir / "symlink"
+
+    # Create the conflicting file
+    target_symlink.parent.mkdir(parents=True)
+    target_symlink.touch()
+    assert target_symlink.is_file()
+
+    ret = file.recurse(
+        name=str(target_dir),
+        source=f"salt://{target_dir.name}",
+        keep_symlinks=True,
+        force_symlinks=False,
+    )
+    # We expect it to fail
+    assert ret.result is False
+
+    # And files not to be created properly
+    assert target_dir.exists()
+    assert target_file.is_file()
+    assert target_symlink.is_file()
+
+
+def test_issue_64630_keep_symlinks_conflicting_force_symlinks_true(
+    file, symlink_scenario_1, tmp_path
+):
+    """
+    Make sure that symlinks are created when there is a conflict with an
+    existing file.
+    """
+    target_dir = tmp_path / symlink_scenario_1  # Target for the file.recurse state
+    target_file = target_dir / "source_file.txt"
+    target_symlink = target_dir / "symlink"
+
+    # Create the conflicting file
+    target_symlink.parent.mkdir(parents=True)
+    target_symlink.touch()
+    assert target_symlink.is_file()
+
+    ret = file.recurse(
+        name=str(target_dir),
+        source=f"salt://{target_dir.name}",
+        force_symlinks=True,
+        keep_symlinks=True,
+    )
+    assert ret.result is True
+
+    assert target_dir.exists()
+    assert target_file.is_file()
+    assert target_symlink.is_symlink()
+
+
+def test_issue_64630_keep_symlinks_similar_names(file, symlink_scenario_3, tmp_path):
+    """
+    Make sure that symlinks are created when there is a file that shares part
+    of the name of the actual symlink file. I'm not sure what I'm testing here
+    as I couldn't really get this to fail either way
+    """
+    target_dir = tmp_path / symlink_scenario_3  # Target for the file.recurse state
+    # symlink target, but has the same name as the symlink itself
+    target_source = target_dir / "not_a_symlink" / "symlink"
+    target_symlink = target_dir / "symlink"
+    decoy_file = target_dir / "notasymlink"
+    just_a_file = target_dir / "just_a_file.txt"
+
+    ret = file.recurse(
+        name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=True
+    )
+    assert ret.result is True
+
+    assert target_dir.exists()
+    assert target_source.is_file()
+    assert decoy_file.is_file()
+    assert just_a_file.is_file()
+    assert target_symlink.is_symlink()
+
+
+def test_issue_62117(file, symlink_scenario_2, tmp_path):
+    target_dir = tmp_path / symlink_scenario_2 / "test"
+    target_file_1 = target_dir / "test1"
+    target_file_2 = target_dir / "test2"
+    target_file_3 = target_dir / "test3"
+    target_symlink = target_dir / "test"
+
+    ret = file.recurse(
+        name=str(target_dir),
+        source=f"salt://{target_dir.parent.name}/test",
+        clean=True,
+        keep_symlinks=True,
+    )
+    assert ret.result is True
+
+    assert target_dir.exists()
+    assert target_file_1.is_file()
+    assert target_file_2.is_file()
+    assert target_file_3.is_file()
+    assert target_symlink.is_symlink()
diff --git a/tests/pytests/functional/states/pkgrepo/test_debian.py b/tests/pytests/functional/states/pkgrepo/test_debian.py
index 468bc429565..9cacf657f5b 100644
--- a/tests/pytests/functional/states/pkgrepo/test_debian.py
+++ b/tests/pytests/functional/states/pkgrepo/test_debian.py
@@ -27,6 +27,9 @@ pytestmark = [
 
 @pytest.fixture
 def pkgrepo(states, grains):
     if grains["os_family"] != "Debian":
         raise pytest.skip.Exception(
             "Test only for debian based platforms", _use_item_location=True
         )
+    # Create sources.list if missing; do this only after the Debian check so
+    # non-Debian hosts are left untouched
+    sources = pathlib.Path("/etc/apt/sources.list")
+    if not sources.exists():
+        sources.touch()
@@ -661,8 +664,8 @@ class Repo:
     @alt_repo.default
     def _default_alt_repo(self):
         """
-        Use an alternative repo, packages do not
-        exist for the OS on repo.saltproject.io
+        Use an alternative repo, packages do not exist for the OS on
+        packages.broadcom.com
         """
         if (
             self.grains["osfullname"] == "Ubuntu"
@@ -690,7 +693,7 @@ class Repo:
 
     @repo_url.default
     def _default_repo_url(self):
-        return f"https://repo.saltproject.io/py3/{self.fullname}/{self.grains['osrelease']}/{self.grains['osarch']}/latest"
+        return "https://packages.broadcom.com/artifactory/saltproject-deb/"
 
     @repo_content.default
     def _default_repo_content(self):
@@ -863,7 +866,7 @@ def test_adding_repo_file_signedby_alt_file(pkgrepo, states, repo):
     assert repo.repo_content in ret.comment
 
     key_file = repo.key_file.parent / "salt-alt-key.gpg"
-    repo_content = "deb [arch=amd64 signed-by={}] https://repo.saltproject.io/py3/debian/10/amd64/latest buster main".format(
+    repo_content = "deb [arch=amd64 signed-by={}] https://packages.broadcom.com/artifactory/saltproject-deb/ buster main".format(
         str(key_file)
     )
     ret = states.pkgrepo.managed(
@@ -925,7 +928,7 @@ def test_adding_repo_file_signedby_fail_key_keyurl(
                 name=repo.repo_content,
                 file=str(repo.repo_file),
                 clean_file=True,
-                key_url="https://repo.saltproject.io/salt/py3/ubuntu/20.04/amd64/latest/SALT-PROJECT-GPG-PUBKEY-2023.pub",
+                key_url="https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public",
                 aptkey=False,
             )
 
diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py
index 9337eab2338..d179eaa4ca0 100644
--- a/tests/pytests/functional/states/test_pkg.py
+++ b/tests/pytests/functional/states/test_pkg.py
@@ -20,12 +20,17 @@ pytestmark = [
     pytest.mark.slow_test,
     pytest.mark.skip_if_not_root,
     pytest.mark.destructive_test,
-    pytest.mark.timeout_unless_on_windows(650),
+    pytest.mark.windows_whitelisted,
+    pytest.mark.timeout_unless_on_windows(240),
 ]
 
 
 @pytest.fixture(scope="module", autouse=True)
 def refresh_db(grains, modules):
+
+    if salt.utils.platform.is_windows():
+        modules.winrepo.update_git_repos()
+
     modules.pkg.refresh_db()
 
     # If this is Arch Linux, check if pacman is in use by another process
@@ -43,7 +48,7 @@ def refresh_db(grains, modules):
 def refresh_keys(grains, modules):
     if grains["os_family"] == "Arch":
         # We should be running this periodically when building new test runner
-        # images, otherwise this could take several minuets to complete.
+        # images, otherwise this could take several minutes to complete.
         proc = subprocess.run(["pacman-key", "--refresh-keys"], check=False)
         if proc.returncode != 0:
             pytest.fail("pacman-key --refresh-keys command failed.")
@@ -53,7 +58,7 @@ def refresh_keys(grains, modules):
 def PKG_TARGETS(grains):
     _PKG_TARGETS = ["figlet", "sl"]
     if grains["os"] == "Windows":
-        _PKG_TARGETS = ["vlc", "putty"]
+        _PKG_TARGETS = ["npp_x64", "winrar"]
     elif grains["os"] == "Amazon":
         if grains["osfinger"] == "Amazon Linux-2023":
             _PKG_TARGETS = ["lynx", "gnuplot-minimal"]
@@ -62,9 +67,9 @@ def PKG_TARGETS(grains):
     elif grains["os_family"] == "RedHat":
         if grains["os"] == "VMware Photon OS":
             if grains["osmajorrelease"] >= 5:
-                _PKG_TARGETS = ["wget", "zsh"]
+                _PKG_TARGETS = ["ctags", "zsh"]
             else:
-                _PKG_TARGETS = ["wget", "zsh-html"]
+                _PKG_TARGETS = ["ctags", "zsh-html"]
         elif (
             grains["os"] in ("CentOS Stream", "Rocky", "AlmaLinux")
             and grains["osmajorrelease"] == 9
@@ -108,7 +113,12 @@ def PKG_32_TARGETS(grains):
     _PKG_32_TARGETS = []
     if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon":
         if grains["os"] == "CentOS":
-            _PKG_32_TARGETS.append("xz-devel.i686")
+            if grains["osmajorrelease"] == 5:
+                _PKG_32_TARGETS = ["xz-devel.i386"]
+            else:
+                _PKG_32_TARGETS.append("xz-devel.i686")
+    elif grains["os"] == "Windows":
+        _PKG_32_TARGETS = ["npp", "putty"]
     if not _PKG_32_TARGETS:
         pytest.skip("No 32 bit packages have been specified for testing")
     return _PKG_32_TARGETS
@@ -198,6 +208,23 @@ def latest_version(ctx, modules):
     return run_command
 
 
+@pytest.fixture(scope="function")
+def install_7zip(modules):
+    try:
+        modules.pkg.install(name="7zip", version="22.01.00.0")
+        modules.pkg.install(name="7zip", version="19.00.00.0")
+        versions = modules.pkg.version("7zip")
+        assert "19.00.00.0" in versions
+        assert "22.01.00.0" in versions
+        yield
+    finally:
+        modules.pkg.remove(name="7zip", version="19.00.00.0")
+        modules.pkg.remove(name="7zip", version="22.01.00.0")
+        versions = modules.pkg.version("7zip")
+        assert "19.00.00.0" not in versions
+        assert "22.01.00.0" not in versions
+
+
 @pytest.mark.requires_salt_modules("pkg.version")
 @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed")
 @pytest.mark.slow_test
@@ -261,7 +288,8 @@ def test_pkg_003_installed_multipkg(caplog, PKG_TARGETS, modules, states, grains
     try:
         ret = states.pkg.installed(name=None, pkgs=PKG_TARGETS, refresh=False)
         assert ret.result is True
-        assert "WARNING" not in caplog.text
+        if not salt.utils.platform.is_windows():
+            assert "WARNING" not in caplog.text
     finally:
         ret = states.pkg.removed(name=None, pkgs=PKG_TARGETS)
         assert ret.result is True
@@ -1084,3 +1112,17 @@ def test_pkg_purged_with_removed_pkg(grains, PKG_TARGETS, states, modules):
         "installed": {},
         "removed": {target: {"new": "", "old": version}},
     }
+
+
+@pytest.mark.skip_unless_on_windows()
+def test_pkg_removed_with_version_multiple(install_7zip, modules, states):
+    """
+    This tests removing a specific version of a package when multiple versions
+    are installed. This is specific to Windows. The only version I could find
+    that allowed multiple installs of differing versions was 7zip, so we'll use
+    that.
+    """
+    ret = states.pkg.removed(name="7zip", version="19.00.00.0")
+    assert ret.result is True
+    current = modules.pkg.version("7zip")
+    assert "22.01.00.0" in current
diff --git a/tests/pytests/functional/states/test_service.py b/tests/pytests/functional/states/test_service.py
index fd023df0637..6f02e31fe44 100644
--- a/tests/pytests/functional/states/test_service.py
+++ b/tests/pytests/functional/states/test_service.py
@@ -3,6 +3,7 @@ Tests for the service state
 """
 
 import os
+import subprocess
 
 import pytest
 
@@ -21,6 +22,20 @@ STOPPED = False
 RUNNING = True
 
 
+def _check_systemctl():
+    if not hasattr(_check_systemctl, "memo"):
+        if not salt.utils.platform.is_linux():
+            _check_systemctl.memo = False
+        else:
+            proc = subprocess.run(["systemctl"], capture_output=True, check=False)
+            _check_systemctl.memo = (
+                b"Failed to get D-Bus connection: No such file or directory"
+                in proc.stderr
+                or b"Failed to connect to bus: No such file or directory" in proc.stderr
+            )
+    return _check_systemctl.memo
+
+
 @pytest.fixture
 def service_name(grains, modules):
     # For local testing purposes
@@ -86,6 +101,7 @@ def check_service_status(exp_return, modules, service_name):
 
 
 @pytest.mark.slow_test
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_running(service_name, modules, states):
     """
     test service.running state module
@@ -105,6 +121,7 @@ def test_service_running(service_name, modules, states):
 
 
 @pytest.mark.slow_test
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_dead(service_name, modules, states):
     """
     test service.dead state module
@@ -119,6 +136,7 @@ def test_service_dead(service_name, modules, states):
 
 
 @pytest.mark.slow_test
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
 def test_service_dead_init_delay(service_name, modules, states):
     """
     test service.dead state module
diff --git a/tests/pytests/functional/test_version.py b/tests/pytests/functional/test_version.py
index 1c198abce7d..cacd953c05f 100644
--- a/tests/pytests/functional/test_version.py
+++ b/tests/pytests/functional/test_version.py
@@ -25,6 +25,11 @@ def salt_extension(tmp_path_factory):
 
 def test_salt_extensions_in_versions_report(tmp_path, salt_extension):
     with SaltVirtualEnv(venv_dir=tmp_path / ".venv") as venv:
+        # These are required for the test to pass; it is unclear why they are
+        # not already installed in the virtualenv
+        venv.install("pyyaml")
+        venv.install("looseversion")
+        venv.install("packaging")
         # Install our extension into the virtualenv
         venv.install(str(salt_extension.srcdir))
         installed_packages = venv.get_installed_packages()
@@ -47,6 +52,12 @@ def test_salt_extensions_absent_in_versions_report(tmp_path, salt_extension):
     Ensure that the 'Salt Extensions' header does not show up when no extension is installed
     """
     with SaltVirtualEnv(venv_dir=tmp_path / ".venv") as venv:
+        # These are required for the test to pass; it is unclear why they are
+        # not already installed in the virtualenv
+        venv.install("pyyaml")
+        venv.install("looseversion")
+        venv.install("packaging")
+        venv.install("distro")
         installed_packages = venv.get_installed_packages()
         assert salt_extension.name not in installed_packages
         ret = venv.run_code(
diff --git a/tests/pytests/functional/transport/base.py b/tests/pytests/functional/transport/base.py
new file mode 100644
index 00000000000..de573f05fde
--- /dev/null
+++ b/tests/pytests/functional/transport/base.py
@@ -0,0 +1,194 @@
+import multiprocessing
+import stat
+import time
+
+import pytest
+
+import salt.transport.base
+import salt.transport.tcp
+import salt.transport.ws
+import salt.transport.zeromq
+
+
+@pytest.mark.parametrize("kind", salt.transport.base.TRANSPORTS)
+def test_master_ipc_socket_perms(kind, tmp_path):
+    opts = {
+        "ipc_mode": "ipc",
+        "hash_type": "md5",
+        "hash_id": "master",
+        "id": "master",
+        "sock_dir": str(tmp_path),
+    }
+    server = salt.transport.base.ipc_publish_server("master", opts)
+
+    # The IPC server always uses the TCP transport; this could change in the future.
+    assert isinstance(server, salt.transport.tcp.PublishServer)
+
+    proc = multiprocessing.Process(target=server.publish_daemon, args=(lambda x: x,))
+    proc.start()
+    time.sleep(1)
+    try:
+        pub_path = tmp_path / "master_event_pub.ipc"
+        assert pub_path.exists()
+        status = pub_path.stat()
+
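+        # The master publish socket should be user/group read-write (0o660)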
+        assert status.st_mode & stat.S_IRUSR
+        assert status.st_mode & stat.S_IWUSR
+        assert not status.st_mode & stat.S_IXUSR
+
+        assert status.st_mode & stat.S_IRGRP
+        assert status.st_mode & stat.S_IWGRP
+        assert not status.st_mode & stat.S_IXGRP
+
+        assert not status.st_mode & stat.S_IROTH
+        assert not status.st_mode & stat.S_IWOTH
+        assert not status.st_mode & stat.S_IXOTH
+
+        pull_path = tmp_path / "master_event_pull.ipc"
+        status = pull_path.stat()
+
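+        # The master pull socket should be user-only read-write (0o600)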
+        assert status.st_mode & stat.S_IRUSR
+        assert status.st_mode & stat.S_IWUSR
+        assert not status.st_mode & stat.S_IXUSR
+
+        assert not status.st_mode & stat.S_IRGRP
+        assert not status.st_mode & stat.S_IWGRP
+        assert not status.st_mode & stat.S_IXGRP
+
+        assert not status.st_mode & stat.S_IROTH
+        assert not status.st_mode & stat.S_IWOTH
+        assert not status.st_mode & stat.S_IXOTH
+    finally:
+        proc.terminate()
+        proc.join()
+        proc.close()
+
+
+@pytest.mark.parametrize("kind", salt.transport.base.TRANSPORTS)
+def test_minion_ipc_socket_perms(kind, tmp_path):
+    opts = {
+        "ipc_mode": "ipc",
+        "hash_type": "md5",
+        "hash_id": "minion",
+        "id": "minion",
+        "sock_dir": str(tmp_path),
+    }
+    server = salt.transport.base.ipc_publish_server("minion", opts)
+
+    # The IPC server always uses the TCP transport; this could change in the future.
+    assert isinstance(server, salt.transport.tcp.PublishServer)
+
+    proc = multiprocessing.Process(target=server.publish_daemon, args=(lambda x: x,))
+    proc.start()
+    time.sleep(1)
+    try:
+        id_hash = salt.transport.base._minion_hash(
+            hash_type=opts["hash_type"],
+            minion_id=opts.get("hash_id", opts["id"]),
+        )
+        pub_path = tmp_path / f"minion_event_{id_hash}_pub.ipc"
+        assert pub_path.exists()
+        status = pub_path.stat()
+
+        assert status.st_mode & stat.S_IRUSR
+        assert status.st_mode & stat.S_IWUSR
+        assert not status.st_mode & stat.S_IXUSR
+
+        assert not status.st_mode & stat.S_IRGRP
+        assert not status.st_mode & stat.S_IWGRP
+        assert not status.st_mode & stat.S_IXGRP
+
+        assert not status.st_mode & stat.S_IROTH
+        assert not status.st_mode & stat.S_IWOTH
+        assert not status.st_mode & stat.S_IXOTH
+
+        pull_path = tmp_path / f"minion_event_{id_hash}_pull.ipc"
+        status = pull_path.stat()
+
+        assert status.st_mode & stat.S_IRUSR
+        assert status.st_mode & stat.S_IWUSR
+        assert not status.st_mode & stat.S_IXUSR
+
+        assert not status.st_mode & stat.S_IRGRP
+        assert not status.st_mode & stat.S_IWGRP
+        assert not status.st_mode & stat.S_IXGRP
+
+        assert not status.st_mode & stat.S_IROTH
+        assert not status.st_mode & stat.S_IWOTH
+        assert not status.st_mode & stat.S_IXOTH
+    finally:
+        proc.terminate()
+        proc.join()
+        proc.close()
+
+
+TRANSPORT_MAP = {
+    "zeromq": salt.transport.zeromq.PublishServer,
+    "tcp": salt.transport.tcp.PublishServer,
+    "ws": salt.transport.ws.PublishServer,
+}
+
+
+def test_check_all_transports():
+    """
+    Ensure we are testing all existing transports. When adding a transport,
+    it should be covered by 'test_transport_socket_perms_conform'.
+    """
+    assert sorted(TRANSPORT_MAP.keys()) == sorted(salt.transport.base.TRANSPORTS)
+
+
+@pytest.mark.parametrize("kind", salt.transport.base.TRANSPORTS)
+def test_transport_socket_perms_conform(kind, tmp_path):
+    opts = {
+        "ipc_mode": "ipc",
+        "hash_type": "md5",
+        "hash_id": "master",
+        "id": "master",
+        "ipv6": False,
+        "sock_dir": str(tmp_path),
+    }
+    kwargs = {
+        "pub_path": str(tmp_path / "pub.ipc"),
+        "pull_path": str(tmp_path / "pull.ipc"),
+        "pub_path_perms": 0o660,
+    }
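+    # pub_path_perms=0o660 asks the transport for owner/group read/write on
+    # the pub socket; every transport should honor it.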
+    server = TRANSPORT_MAP[kind](opts, **kwargs)
+
+    proc = multiprocessing.Process(target=server.publish_daemon, args=(lambda x: x,))
+    proc.start()
+    time.sleep(1)
+    try:
+        pub_path = tmp_path / "pub.ipc"
+        assert pub_path.exists()
+        status = pub_path.stat()
+
+        assert status.st_mode & stat.S_IRUSR
+        assert status.st_mode & stat.S_IWUSR
+        assert not status.st_mode & stat.S_IXUSR
+
+        assert status.st_mode & stat.S_IRGRP
+        assert status.st_mode & stat.S_IWGRP
+        assert not status.st_mode & stat.S_IXGRP
+
+        assert not status.st_mode & stat.S_IROTH
+        assert not status.st_mode & stat.S_IWOTH
+        assert not status.st_mode & stat.S_IXOTH
+
+        pull_path = tmp_path / "pull.ipc"
+        status = pull_path.stat()
+
+        assert status.st_mode & stat.S_IRUSR
+        assert status.st_mode & stat.S_IWUSR
+        assert not status.st_mode & stat.S_IXUSR
+
+        assert not status.st_mode & stat.S_IRGRP
+        assert not status.st_mode & stat.S_IWGRP
+        assert not status.st_mode & stat.S_IXGRP
+
+        assert not status.st_mode & stat.S_IROTH
+        assert not status.st_mode & stat.S_IWOTH
+        assert not status.st_mode & stat.S_IXOTH
+    finally:
+        proc.terminate()
+        proc.join()
+        proc.close()
diff --git a/tests/pytests/functional/utils/gitfs/test_pillar.py b/tests/pytests/functional/utils/gitfs/test_pillar.py
index 8e5a1aa52ca..a30d0ed137b 100644
--- a/tests/pytests/functional/utils/gitfs/test_pillar.py
+++ b/tests/pytests/functional/utils/gitfs/test_pillar.py
@@ -32,6 +32,8 @@ except ImportError:
 skipif_no_gitpython = pytest.mark.skipif(not HAS_GITPYTHON, reason="Missing gitpython")
 skipif_no_pygit2 = pytest.mark.skipif(not HAS_PYGIT2, reason="Missing pygit2")
 
+testgitfs = "https://github.com/saltstack/salt-test-pillar-gitfs.git"
+
 
 @pytest.fixture
 def pillar_opts(salt_factories, tmp_path):
@@ -72,9 +74,7 @@ def _get_pillar(opts, *remotes):
 
 @skipif_no_gitpython
 def test_gitpython_pillar_provider(gitpython_pillar_opts):
-    p = _get_pillar(
-        gitpython_pillar_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git"
-    )
+    p = _get_pillar(gitpython_pillar_opts, testgitfs)
     assert len(p.remotes) == 1
     assert p.provider == "gitpython"
     assert isinstance(p.remotes[0], GitPython)
@@ -82,18 +82,14 @@ def test_gitpython_pillar_provider(gitpython_pillar_opts):
 
 @skipif_no_pygit2
 def test_pygit2_pillar_provider(pygit2_pillar_opts):
-    p = _get_pillar(
-        pygit2_pillar_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git"
-    )
+    p = _get_pillar(pygit2_pillar_opts, testgitfs)
     assert len(p.remotes) == 1
     assert p.provider == "pygit2"
     assert isinstance(p.remotes[0], Pygit2)
 
 
 def _test_env(opts):
-    p = _get_pillar(
-        opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git"
-    )
+    p = _get_pillar(opts, f"__env__ {testgitfs}")
     assert len(p.remotes) == 1
     p.checkout()
     repo = p.remotes[0]
@@ -102,9 +98,7 @@ def _test_env(opts):
     for f in (".gitignore", "README.md", "file.sls", "top.sls"):
         assert f in files
     opts["pillarenv"] = "main"
-    p2 = _get_pillar(
-        opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git"
-    )
+    p2 = _get_pillar(opts, f"__env__ {testgitfs}")
     assert len(p.remotes) == 1
     p2.checkout()
     repo2 = p2.remotes[0]
@@ -165,9 +159,9 @@ def test_pygit2_checkout_fetch_on_fail(pygit2_pillar_opts):
 def _test_multiple_repos(opts):
     p = _get_pillar(
         opts,
-        "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git",
-        "main https://github.com/saltstack/salt-test-pillar-gitfs.git",
-        "branch https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        f"__env__ {testgitfs}",
+        f"main {testgitfs}",
+        f"branch {testgitfs}",
         "__env__ https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
         "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
     )
@@ -179,9 +173,9 @@ def _test_multiple_repos(opts):
 
     p2 = _get_pillar(
         opts,
-        "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git",
-        "main https://github.com/saltstack/salt-test-pillar-gitfs.git",
-        "branch https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        f"__env__ {testgitfs}",
+        f"main {testgitfs}",
+        f"branch {testgitfs}",
         "__env__ https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
         "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
     )
@@ -194,9 +188,9 @@ def _test_multiple_repos(opts):
     opts["pillarenv"] = "main"
     p3 = _get_pillar(
         opts,
-        "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git",
-        "main https://github.com/saltstack/salt-test-pillar-gitfs.git",
-        "branch https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        f"__env__ {testgitfs}",
+        f"main {testgitfs}",
+        f"branch {testgitfs}",
         "__env__ https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
         "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
     )
@@ -227,15 +221,13 @@ def test_pygit2_multiple_repos(pygit2_pillar_opts):
 def _test_fetch_request(opts):
     p = _get_pillar(
         opts,
-        "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        f"__env__ {testgitfs}",
         "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
     )
     frequest = os.path.join(p.remotes[0].get_salt_working_dir(), "fetch_request")
     frequest_other = os.path.join(p.remotes[1].get_salt_working_dir(), "fetch_request")
     opts["pillarenv"] = "main"
-    p2 = _get_pillar(
-        opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git"
-    )
+    p2 = _get_pillar(opts, f"__env__ {testgitfs}")
     frequest2 = os.path.join(p2.remotes[0].get_salt_working_dir(), "fetch_request")
     assert frequest != frequest2
     assert os.path.isfile(frequest) is False
@@ -277,15 +269,13 @@ def test_pygit2_fetch_request(pygit2_pillar_opts):
 def _test_clear_old_remotes(opts):
     p = _get_pillar(
         opts,
-        "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        f"__env__ {testgitfs}",
         "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git",
     )
     repo = p.remotes[0]
     repo2 = p.remotes[1]
     opts["pillarenv"] = "main"
-    p2 = _get_pillar(
-        opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git"
-    )
+    p2 = _get_pillar(opts, f"__env__ {testgitfs}")
     repo3 = p2.remotes[0]
     assert os.path.isdir(repo.get_cachedir()) is True
     assert os.path.isdir(repo2.get_cachedir()) is True
@@ -313,7 +303,7 @@ def test_pygit2_clear_old_remotes(pygit2_pillar_opts):
 def _test_remote_map(opts):
     p = _get_pillar(
         opts,
-        "https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        testgitfs,
     )
     p.fetch_remotes()
     assert len(p.remotes) == 1
@@ -335,7 +325,7 @@ def test_pygit2_remote_map(pygit2_pillar_opts):
 def _test_lock(opts):
     p = _get_pillar(
         opts,
-        "https://github.com/saltstack/salt-test-pillar-gitfs.git",
+        testgitfs,
     )
     p.fetch_remotes()
     assert len(p.remotes) == 1
@@ -345,8 +335,7 @@ def _test_lock(opts):
     assert repo.lock() == (
         [
             (
-                f"Set update lock for git_pillar remote "
-                f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'"
+                f"Set update lock for git_pillar remote '{testgitfs}' on machine_id '{mach_id}'"
             )
         ],
         [],
@@ -355,8 +344,7 @@ def _test_lock(opts):
     assert repo.clear_lock() == (
         [
             (
-                f"Removed update lock for git_pillar remote "
-                f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'"
+                f"Removed update lock for git_pillar remote '{testgitfs}' on machine_id '{mach_id}'"
             )
         ],
         [],
diff --git a/tests/pytests/functional/utils/test_process.py b/tests/pytests/functional/utils/test_process.py
index ed165ea3e91..14525c426af 100644
--- a/tests/pytests/functional/utils/test_process.py
+++ b/tests/pytests/functional/utils/test_process.py
@@ -5,6 +5,10 @@ tests.pytests.functional.utils.test_process
 Test salt's process utility module
 """
 
+import os
+import pathlib
+import time
+
 import pytest
 
 import salt.utils.process
@@ -35,3 +39,35 @@ def test_process_manager_60749(process_manager):
 
     process_manager.add_process(Process)
     process_manager.check_children()
+
+
+def _get_num_fds(pid):
+    "Determine the number of open fds for a process, linux only."
+    return len(list(pathlib.Path(f"/proc/{pid}/fd").iterdir()))
+
+
+@pytest.mark.skip_unless_on_linux
+def test_subprocess_list_fds():
+    pid = os.getpid()
+    process_list = salt.utils.process.SubprocessList()
+
+    before_num = _get_num_fds(pid)
+
+    def target():
+        pass
+
+    process = salt.utils.process.SignalHandlingProcess(target=target)
+    process.start()
+
+    process_list.add(process)
+    time.sleep(0.3)
+
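+    # The running subprocess is expected to hold two extra fds open in the
+    # parent (presumably the multiprocessing pipe/sentinel pair).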
+    num = _get_num_fds(pid)
+    assert num == before_num + 2
+    start = time.time()
+    while time.time() - start < 1:
+        process_list.cleanup()
+        if not process_list.processes:
+            break
+    assert len(process_list.processes) == 0
+    assert _get_num_fds(pid) == num - 2
diff --git a/tests/pytests/functional/utils/win_dacl/test_file.py b/tests/pytests/functional/utils/win_dacl/test_file.py
index 7de08f03422..9709860c5d6 100644
--- a/tests/pytests/functional/utils/win_dacl/test_file.py
+++ b/tests/pytests/functional/utils/win_dacl/test_file.py
@@ -1,3 +1,6 @@
+import os
+import tempfile
+
 import pytest
 
 import salt.utils.win_dacl as win_dacl
@@ -18,9 +21,24 @@ def configure_loader_modules(minion_opts):
     }
 
 
+@pytest.fixture(scope="module")
+def user_temp_dir():
+    """
+    Return the user's temp directory if available.
+
+    Some of the tests fail when using the system temp directory.
+    """
+    if "TMP" in os.environ and os.path.exists(os.environ["TMP"]):
+        return os.environ["TMP"]
+    return tempfile.gettempdir()
+
+
 @pytest.fixture(scope="function")
-def test_file():
-    with pytest.helpers.temp_file("dacl_test.file") as test_file:
+def test_file(tmp_path_factory, user_temp_dir):
+
+    with pytest.helpers.temp_file(
+        "dacl_test.file", directory=user_temp_dir
+    ) as test_file:
         yield test_file
 
 
@@ -671,8 +689,10 @@ def test_get_set_inheritance(test_file):
     assert result is False
 
 
-def test_copy_security():
-    with pytest.helpers.temp_file("source_test.file") as source:
+def test_copy_security(user_temp_dir):
+    with pytest.helpers.temp_file(
+        "source_test.file", directory=user_temp_dir
+    ) as source:
         # Set permissions on Source
         result = win_dacl.set_permissions(
             obj_name=str(source),
@@ -697,7 +717,9 @@ def test_copy_security():
         )
         assert result is True
 
-        with pytest.helpers.temp_file("target_test.file") as target:
+        with pytest.helpers.temp_file(
+            "target_test.file", directory=user_temp_dir
+        ) as target:
             # Copy security from Source to Target
             result = win_dacl.copy_security(source=str(source), target=str(target))
             assert result is True
diff --git a/tests/pytests/functional/utils/yamllint/test_yamllint.py b/tests/pytests/functional/utils/yamllint/test_yamllint.py
index 403c6fc610e..3c730523c4d 100644
--- a/tests/pytests/functional/utils/yamllint/test_yamllint.py
+++ b/tests/pytests/functional/utils/yamllint/test_yamllint.py
@@ -7,7 +7,7 @@ import salt.utils.versions as versions
 try:
     import salt.utils.yamllint as yamllint
 
-    YAMLLINT_AVAILABLE = True
+    YAMLLINT_AVAILABLE = yamllint.has_yamllint()
 except ImportError:
     YAMLLINT_AVAILABLE = False
 
diff --git a/tests/pytests/integration/cli/test_salt.py b/tests/pytests/integration/cli/test_salt.py
index 37925160ca6..90e3eed6d78 100644
--- a/tests/pytests/integration/cli/test_salt.py
+++ b/tests/pytests/integration/cli/test_salt.py
@@ -2,6 +2,7 @@
 :codeauthor: Thayne Harbaugh (tharbaug@adobe.com)
 """
 
+import glob
 import logging
 import os
 import shutil
@@ -276,3 +277,28 @@ def test_minion_65400(salt_cli, salt_minion, salt_minion_2, salt_master):
         for minion_id in ret.data:
             assert ret.data[minion_id] != "Error: test.configurable_test_state"
             assert isinstance(ret.data[minion_id], dict)
+
+
+@pytest.mark.skip_on_windows(reason="Windows does not support SIGUSR1")
+def test_sigusr1_handler(salt_master, salt_minion):
+    """
+    Ensure SIGUSR1 handler works.
+
+    Refer to https://docs.saltproject.io/en/latest/topics/troubleshooting/minion.html#live-python-debug-output for more details.
+    """
+    tb_glob = os.path.join(tempfile.gettempdir(), "salt-debug-*.log")
+    tracebacks_before = glob.glob(tb_glob)
+    os.kill(salt_minion.pid, signal.SIGUSR1)
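+    # Wait up to ~10 seconds for the minion's traceback file to appear.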
+    for i in range(10):
+        if len(glob.glob(tb_glob)) - len(tracebacks_before) == 1:
+            break
+        time.sleep(1)
+
+    os.kill(salt_master.pid, signal.SIGUSR1)
+    for i in range(10):
+        if len(glob.glob(tb_glob)) - len(tracebacks_before) == 2:
+            break
+        time.sleep(1)
+
+    tracebacks_after = glob.glob(tb_glob)
+    assert len(tracebacks_after) - len(tracebacks_before) == 2
diff --git a/tests/pytests/integration/cli/test_salt_call.py b/tests/pytests/integration/cli/test_salt_call.py
index 1d770c0ffbe..f927f499c85 100644
--- a/tests/pytests/integration/cli/test_salt_call.py
+++ b/tests/pytests/integration/cli/test_salt_call.py
@@ -1,18 +1,24 @@
 import copy
 import logging
 import os
+import pathlib
 import pprint
 import re
+import shutil
 import sys
 
 import pytest
+from saltfactories.utils import random_string
 
 import salt.defaults.exitcodes
 import salt.utils.files
 import salt.utils.json
 import salt.utils.platform
 import salt.utils.yaml
-from tests.support.helpers import PRE_PYTEST_SKIP, PRE_PYTEST_SKIP_REASON
+import tests.conftest
+import tests.support.helpers
+from tests.conftest import FIPS_TESTRUN
+from tests.support.runtests import RUNTIME_VARS
 
 pytestmark = [
     pytest.mark.core_test,
@@ -95,7 +101,7 @@ def test_local_salt_call(salt_call_cli):
         assert contents.count("foo") == 1, contents
 
 
-@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON)
+@pytest.mark.skip_on_windows(reason=tests.support.helpers.PRE_PYTEST_SKIP_REASON)
 def test_user_delete_kw_output(salt_call_cli):
     ret = salt_call_cli.run("-d", "user.delete", _timeout=120)
     assert ret.returncode == 0
@@ -126,7 +132,7 @@ def test_issue_6973_state_highstate_exit_code(salt_call_cli):
     assert expected_comment in ret.stdout
 
 
-@PRE_PYTEST_SKIP
+@tests.support.helpers.PRE_PYTEST_SKIP
 def test_issue_15074_output_file_append(salt_call_cli):
 
     with pytest.helpers.temp_file(name="issue-15074") as output_file_append:
@@ -154,7 +160,7 @@ def test_issue_15074_output_file_append(salt_call_cli):
         assert second_run_output == first_run_output + first_run_output
 
 
-@PRE_PYTEST_SKIP
+@tests.support.helpers.PRE_PYTEST_SKIP
 def test_issue_14979_output_file_permissions(salt_call_cli):
     with pytest.helpers.temp_file(name="issue-14979") as output_file:
         with salt.utils.files.set_umask(0o077):
@@ -307,7 +313,7 @@ def test_syslog_file_not_found(salt_minion, salt_call_cli, tmp_path):
             assert "Failed to setup the Syslog logging handler" in ret.stderr
 
 
-@PRE_PYTEST_SKIP
+@tests.support.helpers.PRE_PYTEST_SKIP
 @pytest.mark.skip_on_windows
 def test_return(salt_call_cli, salt_run_cli):
     command = "echo returnTOmaster"
@@ -429,3 +435,99 @@ def test_local_salt_call_no_function_no_retcode(salt_call_cli):
     assert "test" in ret.data
     assert ret.data["test"] == "'test' is not available."
     assert "test.echo" in ret.data
+
+
+@pytest.fixture
+def master_id_alt():
+    master_id = random_string("master-")
+    yield master_id
+
+
+@pytest.fixture
+def minion_id_alt():
+    minion_id = random_string("minion-")
+    yield minion_id
+
+
+@pytest.fixture
+def salt_master_alt(salt_factories, tmp_path, master_id_alt):
+    """
+    A running salt-master fixture
+    """
+    root_dir = salt_factories.get_root_dir_for_daemon(master_id_alt)
+    conf_dir = root_dir / "conf"
+    conf_dir.mkdir(exist_ok=True)
+    extension_modules_path = str(root_dir / "extension_modules")
+    if not os.path.exists(extension_modules_path):
+        shutil.copytree(
+            os.path.join(RUNTIME_VARS.FILES, "extension_modules"),
+            extension_modules_path,
+        )
+    cache = pathlib.Path(extension_modules_path) / "cache"
+    cache.mkdir()
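+    # Stub the localfs cache module so every cache write raises, forcing the
+    # failure path that test_cve_2024_37088 below relies on.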
+    localfs = cache / "localfs.py"
+    localfs.write_text(
+        tests.support.helpers.dedent(
+            """
+        from salt.exceptions import SaltClientError
+        def store(bank, key, data): # , cachedir):
+            raise SaltClientError("TEST")
+        """
+        )
+    )
+    factory = salt_factories.salt_master_daemon(
+        master_id_alt,
+        defaults={
+            "root_dir": str(root_dir),
+            "extension_modules": extension_modules_path,
+            "auto_accept": True,
+        },
+        overrides={
+            "fips_mode": FIPS_TESTRUN,
+            "publish_signing_algorithm": (
+                "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
+            ),
+        },
+    )
+    with factory.pillar_tree.base.temp_file("cve_2024_37088.sls", "foobar: bang"):
+        with factory.state_tree.base.temp_file(
+            "cve_2024_37088.sls",
+            """
+            # cve_2024_37088.sls
+            {{%- set var = salt['pillar.get']('foobar', 'state default') %}}
+
+            test:
+              file.managed:
+                - name: {0}
+                - contents: {{{{ var }}}}
+            """.format(
+                tmp_path / "cve_2024_37088.txt"
+            ),
+        ):
+            with factory.started():
+                yield factory
+
+
+@pytest.fixture
+def salt_call_alt(salt_master_alt, minion_id_alt):
+    minion_factory = salt_master_alt.salt_minion_daemon(
+        minion_id_alt,
+        overrides={
+            "fips_mode": tests.conftest.FIPS_TESTRUN,
+            "encryption_algorithm": (
+                "OAEP-SHA224" if tests.conftest.FIPS_TESTRUN else "OAEP-SHA1"
+            ),
+            "signing_algorithm": (
+                "PKCS1v15-SHA224" if tests.conftest.FIPS_TESTRUN else "PKCS1v15-SHA1"
+            ),
+        },
+    )
+    return minion_factory.salt_call_cli()
+
+
+def test_cve_2024_37088(salt_master_alt, salt_call_alt, caplog):
+    with caplog.at_level(logging.ERROR):
+        ret = salt_call_alt.run("state.sls", "cve_2024_37088")
+        assert ret.returncode == 1
+        assert ret.data is None
+        assert "Got a bad pillar from master, type str, expecting dict" in caplog.text
diff --git a/tests/pytests/integration/grains/test_custom.py b/tests/pytests/integration/grains/test_custom.py
new file mode 100644
index 00000000000..6eeab4deab1
--- /dev/null
+++ b/tests/pytests/integration/grains/test_custom.py
@@ -0,0 +1,20 @@
+"""
+Test the custom grains
+"""
+
+import pytest
+
+pytestmark = [
+    pytest.mark.windows_whitelisted,
+    pytest.mark.slow_test,
+]
+
+
+def test_grains_passed_to_custom_grain(salt_call_cli):
+    """
+    test if current grains are passed to grains module functions that have a grains argument
+    """
+    ret = salt_call_cli.run("grains.item", "custom_grain_test")
+    assert ret.returncode == 0
+    assert ret.data
+    assert ret.data["custom_grain_test"] == "itworked"
diff --git a/tests/pytests/integration/loader/test_ext_grains.py b/tests/pytests/integration/loader/test_ext_grains.py
index 63997db27e2..227859db03f 100644
--- a/tests/pytests/integration/loader/test_ext_grains.py
+++ b/tests/pytests/integration/loader/test_ext_grains.py
@@ -8,6 +8,20 @@ def test_grains_overwrite(salt_cli, salt_master, salt_minion):
     # Force a grains sync
     salt_cli.run("saltutil.sync_grains", minion_tgt=salt_minion.id)
 
+    # XXX: This should no longer be needed because salt_cli.run is used.
+    # To avoid a race condition on Windows, we need to make sure the
+    # `custom_grain2.py` file is present in the _grains directory
+    # before trying to get the grains. This test may execute before the
+    # minion has finished syncing down the files it needs.
+    # module = os.path.join(
+    #    salt_minion.config["cachedir"],
+    #    "files",
+    #    "base",
+    #    "_grains",
+    #    "custom_grain2.py",
+    # )
+    # assert os.path.exists(module)
+
     # Check that custom grains are overwritten
     ret = salt_cli.run("grains.items", minion_tgt=salt_minion.id)
     assert ret.data["a_custom"] == {"k2": "v2"}
diff --git a/tests/pytests/integration/master/test_ipc_perms.py b/tests/pytests/integration/master/test_ipc_perms.py
new file mode 100644
index 00000000000..85f70be893d
--- /dev/null
+++ b/tests/pytests/integration/master/test_ipc_perms.py
@@ -0,0 +1,12 @@
+import pathlib
+import stat
+
+
+def test_master_event_pub_ipc_perms(salt_master):
+    pub_path = pathlib.Path(salt_master.config["sock_dir"]) / "master_event_pub.ipc"
+    assert pub_path.exists()
+    status = pub_path.stat()
+    assert status.st_mode & stat.S_IRUSR
+    assert status.st_mode & stat.S_IWUSR
+    assert status.st_mode & stat.S_IRGRP
+    assert status.st_mode & stat.S_IWGRP
diff --git a/tests/pytests/integration/minion/test_schedule_large_event.py b/tests/pytests/integration/minion/test_schedule_large_event.py
new file mode 100644
index 00000000000..bbc44611e13
--- /dev/null
+++ b/tests/pytests/integration/minion/test_schedule_large_event.py
@@ -0,0 +1,105 @@
+import sys
+
+import pytest
+
+import salt.utils.event
+import salt.utils.platform
+import tests.support.helpers
+from tests.conftest import FIPS_TESTRUN
+
+
+@pytest.fixture
+def salt_master_1(request, salt_factories):
+    config_defaults = {
+        "open_mode": True,
+        "transport": request.config.getoption("--transport"),
+    }
+    config_overrides = {
+        "interface": "127.0.0.1",
+        "fips_mode": FIPS_TESTRUN,
+        "publish_signing_algorithm": (
+            "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
+        ),
+    }
+
+    factory = salt_factories.salt_master_daemon(
+        "master-1",
+        defaults=config_defaults,
+        overrides=config_overrides,
+        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
+    )
+    with factory.started(start_timeout=120):
+        yield factory
+
+
+@pytest.fixture
+def salt_minion_1(salt_master_1):
+    config_defaults = {
+        "transport": salt_master_1.config["transport"],
+    }
+    master_1_port = salt_master_1.config["ret_port"]
+    master_1_addr = salt_master_1.config["interface"]
+    config_overrides = {
+        "master": [
+            f"{master_1_addr}:{master_1_port}",
+        ],
+        "test.foo": "baz",
+        "fips_mode": FIPS_TESTRUN,
+        "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1",
+        "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1",
+    }
+    factory = salt_master_1.salt_minion_daemon(
+        "minion-1",
+        defaults=config_defaults,
+        overrides=config_overrides,
+        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
+    )
+    with factory.started(start_timeout=120):
+        yield factory
+
+
+@pytest.fixture
+def script(salt_minion_1, tmp_path):
+    path = tmp_path / "script.py"
+    content = f"""
+    import salt.config
+    import salt.utils.event
+
+    opts = salt.config.minion_config('{salt_minion_1.config_file}')
+
+    # Build a large event: 10000 keys of 100 bytes each.
+
+    big_event = dict()
+    for i in range(10000):
+        big_event[i] = b'0' * 100
+
+    with salt.utils.event.get_event("minion", opts=opts) as event:
+        event.fire_master(big_event, 'bigevent')
+
+    """
+    path.write_text(tests.support.helpers.dedent(content))
+    return path
+
+
+# @pytest.mark.timeout_unless_on_windows(360)
+def test_schedule_large_event(salt_master_1, salt_minion_1, script):
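+    # Schedule the minion to run the event-firing script via cmd.run, then
+    # listen on the master event bus for the resulting "bigevent" tag.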
+    cli = salt_master_1.salt_cli(timeout=120)
+    ret = cli.run(
+        "schedule.add",
+        name="myjob",
+        function="cmd.run",
+        seconds=5,
+        job_args=f'["{sys.executable} {script}"]',
+        minion_tgt=salt_minion_1.id,
+    )
+    assert "result" in ret.data
+    assert ret.data["result"]
+    with salt.utils.event.get_event(
+        "master",
+        salt_master_1.config["sock_dir"],
+        salt_master_1.config,
+    ) as event:
+        ret = event.get_event(tag="bigevent", wait=15)
+        assert ret
+        assert "data" in ret
+        assert len(ret["data"]) == 10000
diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py
index b9428b99465..09bd51dc989 100644
--- a/tests/pytests/integration/netapi/test_ssh_client.py
+++ b/tests/pytests/integration/netapi/test_ssh_client.py
@@ -151,7 +151,7 @@ def test_shell_inject_ssh_priv(
     """
     # ZDI-CAN-11143
     path = tmp_path / "test-11143"
-    tgts = ["repo.saltproject.io", "www.zerodayinitiative.com"]
+    tgts = ["packages.broadcom.com", "www.zerodayinitiative.com"]
     ret = None
     for tgt in tgts:
         low = {
diff --git a/tests/pytests/integration/pillar/test_httpclient_in_pillar.py b/tests/pytests/integration/pillar/test_httpclient_in_pillar.py
index 905a8f51cb3..666e51de52c 100644
--- a/tests/pytests/integration/pillar/test_httpclient_in_pillar.py
+++ b/tests/pytests/integration/pillar/test_httpclient_in_pillar.py
@@ -5,7 +5,7 @@ def test_pillar_using_http_query(salt_master, salt_minion, salt_cli, tmp_path):
         - http_pillar_test
     """
     my_pillar = """
-    {%- set something = salt['http.query']('https://raw.githubusercontent.com/saltstack/salt/master/.pre-commit-config.yaml', raise_error=False, verify_ssl=False, status=True, timeout=5).status %}
+    {%- set something = salt['http.query']('https://raw.githubusercontent.com/saltstack/salt/master/.pre-commit-config.yaml', raise_error=False, verify_ssl=False, status=True, timeout=15).status %}
     http_query_test: {{ something }}
     """
 
diff --git a/tests/pytests/integration/ssh/state/test_parallel.py b/tests/pytests/integration/ssh/state/test_parallel.py
new file mode 100644
index 00000000000..8ff9d7db139
--- /dev/null
+++ b/tests/pytests/integration/ssh/state/test_parallel.py
@@ -0,0 +1,61 @@
+"""
+Verify salt-ssh states support ``parallel``.
+"""
+
+import pytest
+
+pytestmark = [
+    pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
+    pytest.mark.slow_test,
+]
+
+
+@pytest.fixture(scope="module", autouse=True)
+def state_tree_parallel(base_env_state_tree_root_dir):
+    top_file = """
+    base:
+      'localhost':
+        - parallel
+      '127.0.0.1':
+        - parallel
+    """
+    state_file = """
+    {%- for i in range(5) %}
+    This runs in parallel {{ i }}:
+      cmd.run:
+        - name: sleep 0.{{ i }}
+        - parallel: true
+    {%- endfor %}
+    """
+    top_tempfile = pytest.helpers.temp_file(
+        "top.sls", top_file, base_env_state_tree_root_dir
+    )
+    state_tempfile = pytest.helpers.temp_file(
+        "parallel.sls", state_file, base_env_state_tree_root_dir
+    )
+    with top_tempfile, state_tempfile:
+        yield
+
+
+@pytest.mark.parametrize(
+    "args",
+    (
+        pytest.param(("state.sls", "parallel"), id="sls"),
+        pytest.param(("state.highstate",), id="highstate"),
+        pytest.param(("state.top", "top.sls"), id="top"),
+    ),
+)
+def test_it(salt_ssh_cli, args):
+    """
+    Ensure states with ``parallel: true`` do not cause a crash.
+    This does not check that they were actually run in parallel,
+    since that would result in either a long-running or a flaky test.
+    """
+    ret = salt_ssh_cli.run(*args)
+    assert ret.returncode == 0
+    assert isinstance(ret.data, dict)
+    for i in range(5):
+        key = f"cmd_|-This runs in parallel {i}_|-sleep 0.{i}_|-run"
+        assert key in ret.data
+        assert "pid" in ret.data[key]["changes"]
+        assert ret.data[key]["changes"]["retcode"] == 0
diff --git a/tests/pytests/integration/ssh/test_cp.py b/tests/pytests/integration/ssh/test_cp.py
index ee240f23252..8fddb7bd73d 100644
--- a/tests/pytests/integration/ssh/test_cp.py
+++ b/tests/pytests/integration/ssh/test_cp.py
@@ -368,7 +368,7 @@ def test_get_url_nonexistent_source(salt_ssh_cli, caplog):
 
 def test_get_url_https(salt_ssh_cli, tmp_path, cachedir):
     tgt = tmp_path / "index.html"
-    res = salt_ssh_cli.run("cp.get_url", "https://repo.saltproject.io/index.html", tgt)
+    res = salt_ssh_cli.run("cp.get_url", "https://saltproject.io/index.html", tgt)
     assert res.returncode == 0
     assert res.data
     assert res.data == str(tgt)
@@ -378,23 +378,20 @@ def test_get_url_https(salt_ssh_cli, tmp_path, cachedir):
         / salt_ssh_cli.get_minion_tgt()
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "index.html"
     )
     for path in (tgt, master_path):
         assert path.exists()
         data = path.read_text(encoding="utf-8")
         assert "Salt Project" in data
-        assert "Package" in data
-        assert "Repo" in data
-        assert "AYBABTU" not in data
 
 
 def test_get_url_https_dest_empty(salt_ssh_cli, tmp_path, cachedir):
     """
     https:// source given and destination omitted, should still cache the file
     """
-    res = salt_ssh_cli.run("cp.get_url", "https://repo.saltproject.io/index.html")
+    res = salt_ssh_cli.run("cp.get_url", "https://saltproject.io/index.html")
     assert res.returncode == 0
     assert res.data
     master_path = (
@@ -403,7 +400,7 @@ def test_get_url_https_dest_empty(salt_ssh_cli, tmp_path, cachedir):
         / salt_ssh_cli.get_minion_tgt()
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "index.html"
     )
     tgt = _convert(salt_ssh_cli, cachedir, master_path)
@@ -412,9 +409,6 @@ def test_get_url_https_dest_empty(salt_ssh_cli, tmp_path, cachedir):
         assert path.exists()
         data = path.read_text(encoding="utf-8")
         assert "Salt Project" in data
-        assert "Package" in data
-        assert "Repo" in data
-        assert "AYBABTU" not in data
 
 
 def test_get_url_https_no_dest(salt_ssh_cli):
@@ -425,21 +419,16 @@ def test_get_url_https_no_dest(salt_ssh_cli):
     start = time.time()
     sleep = 5
     while time.time() - start <= timeout:
-        res = salt_ssh_cli.run(
-            "cp.get_url", "https://repo.saltproject.io/index.html", None
-        )
+        res = salt_ssh_cli.run("cp.get_url", "https://saltproject.io/index.html", None)
         if isinstance(res.data, str) and res.data.find("HTTP 599") == -1:
             break
         time.sleep(sleep)
     if isinstance(res.data, str) and res.data.find("HTTP 599") != -1:
-        raise Exception("https://repo.saltproject.io/index.html returned 599 error")
+        raise Exception("https://saltproject.io/index.html returned 599 error")
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, str)
     assert "Salt Project" in res.data
-    assert "Package" in res.data
-    assert "Repo" in res.data
-    assert "AYBABTU" not in res.data
 
 
 @pytest.mark.parametrize("scheme", ("file://", ""))
@@ -523,24 +512,18 @@ def test_get_file_str_nonexistent_source(salt_ssh_cli, caplog):
 
 
 def test_get_file_str_https(salt_ssh_cli, cachedir):
-    src = "https://repo.saltproject.io/index.html"
+    src = "https://saltproject.io/index.html"
     res = salt_ssh_cli.run("cp.get_file_str", src)
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, str)
     assert "Salt Project" in res.data
-    assert "Package" in res.data
-    assert "Repo" in res.data
-    assert "AYBABTU" not in res.data
-    tgt = cachedir / "extrn_files" / "base" / "repo.saltproject.io" / "index.html"
+    tgt = cachedir / "extrn_files" / "base" / "saltproject.io" / "index.html"
     master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
     for path in (tgt, master_path):
         assert path.exists()
         text = path.read_text(encoding="utf-8")
         assert "Salt Project" in text
-        assert "Package" in text
-        assert "Repo" in text
-        assert "AYBABTU" not in text
 
 
 def test_get_file_str_local(salt_ssh_cli, cachedir, caplog):
@@ -577,7 +560,7 @@ def test_cache_file(salt_ssh_cli, suffix, cachedir):
 def _cache_twice(salt_master, request, salt_ssh_cli, cachedir):
 
     # ensure the cache is clean
-    tgt = cachedir / "extrn_files" / "base" / "repo.saltproject.io" / "index.html"
+    tgt = cachedir / "extrn_files" / "base" / "saltproject.io" / "index.html"
     tgt.unlink(missing_ok=True)
     master_tgt = _convert(salt_ssh_cli, cachedir, tgt, master=True)
     master_tgt.unlink(missing_ok=True)
@@ -585,7 +568,7 @@ def _cache_twice(salt_master, request, salt_ssh_cli, cachedir):
     # create a template that will cause a file to get cached twice
     # within the same context
     name = "cp_cache"
-    src = "https://repo.saltproject.io/index.html"
+    src = "https://saltproject.io/index.html"
     remove = getattr(request, "param", False)
     contents = f"""
 {{%- set cache = salt["cp.cache_file"]("{src}") %}}
diff --git a/tests/pytests/integration/ssh/test_jinja_mods.py b/tests/pytests/integration/ssh/test_jinja_mods.py
index aa745c7cdcd..f0bd7c508f4 100644
--- a/tests/pytests/integration/ssh/test_jinja_mods.py
+++ b/tests/pytests/integration/ssh/test_jinja_mods.py
@@ -28,3 +28,124 @@ def test_echo(salt_ssh_cli, base_env_state_tree_root_dir):
         ret = salt_ssh_cli.run("state.apply", name)
         result = StateResult(ret.data)
         assert result.comment == echo
+
+
+@pytest.fixture
+def _exewrap(base_env_state_tree_root_dir, salt_run_cli):
+    exe = """
+def run():
+    return "exe"
+"""
+
+    wrapper = """
+def run():
+    return "wrapper"
+"""
+    name = "exewrap"
+    try:
+        with pytest.helpers.temp_file(
+            f"{name}.py", exe, base_env_state_tree_root_dir / "_modules"
+        ):
+            with pytest.helpers.temp_file(
+                f"{name}.py", wrapper, base_env_state_tree_root_dir / "_wrapper"
+            ):
+                res = salt_run_cli.run("saltutil.sync_all")
+                assert res.returncode == 0
+                assert f"modules.{name}" in res.data["modules"]
+                assert f"wrapper.{name}" in res.data["wrapper"]
+                yield name
+    finally:
+        res = salt_run_cli.run("saltutil.sync_all")
+        assert res.returncode == 0
+
+
+@pytest.fixture
+def _jinja_loader_attr_template(base_env_state_tree_root_dir, _exewrap):
+    contents = f"""
+foo:
+  test.show_notification:
+    - text: {{{{ salt.{_exewrap}.run() | json }}}}
+    """
+    name = "funcwrapper_attr_exewrap_test"
+    with pytest.helpers.temp_file(
+        f"{name}.sls", contents, base_env_state_tree_root_dir
+    ):
+        yield name
+
+
+def test_wrapper_attribute_access(_jinja_loader_attr_template, salt_ssh_cli):
+    """
+    Ensure wrappers can be accessed via the attribute syntax.
+    It's not recommended to use this syntax, but the regular loader supports it
+    as well, so we should have feature parity.
+    Issue #66600.
+    """
+    res = salt_ssh_cli.run("state.apply", _jinja_loader_attr_template)
+    assert res.returncode == 0
+    ret = StateResult(res.data)
+    assert ret.result is True
+    assert ret.comment == "wrapper"
+
+
+@pytest.fixture
+def _jinja_loader_get_template(base_env_state_tree_root_dir, _exewrap):
+    contents = """
+foo:
+  test.show_notification:
+    - text: {{ salt.grains.get("id") | json }}
+    """
+    name = "funcwrapper_attr_get_test"
+    with pytest.helpers.temp_file(
+        f"{name}.sls", contents, base_env_state_tree_root_dir
+    ):
+        yield name
+
+
+def test_wrapper_attribute_access_get(_jinja_loader_get_template, salt_ssh_cli):
+    """
+    Ensure a function named `.get` is not shadowed when accessed via attribute syntax.
+    It's not recommended to use it, but the regular loader supports it
+    as well, so we should have feature parity.
+    Issue #41794.
+    """
+    res = salt_ssh_cli.run("state.apply", _jinja_loader_get_template)
+    assert res.returncode == 0
+    ret = StateResult(res.data)
+    assert ret.result is True
+    assert ret.comment == "localhost"
+
+
+@pytest.fixture
+def _python_loader_attribute_access_template(base_env_state_tree_root_dir, _exewrap):
+    contents = """
+#!py
+def run():
+    return {
+        "foo": {
+            "test.show_notification": [
+                {"text": __salt__.grains.get("id")}
+            ]
+        }
+    }
+    """
+    name = "funcwrapper_attr_python_test"
+    with pytest.helpers.temp_file(
+        f"{name}.sls", contents, base_env_state_tree_root_dir
+    ):
+        yield name
+
+
+def test_wrapper_attribute_access_non_jinja(
+    _python_loader_attribute_access_template, salt_ssh_cli
+):
+    """
+    Ensure attribute access works with non-Jinja renderers.
+    It's not recommended to use this syntax, but the regular loader supports it
+    as well, so we should have feature parity.
+    Issue #66376.
+    """
+    res = salt_ssh_cli.run("state.apply", _python_loader_attribute_access_template)
+    assert res.returncode == 0
+    ret = StateResult(res.data)
+    assert ret.result is True
+    assert ret.comment == "localhost"
diff --git a/tests/pytests/integration/ssh/test_master.py b/tests/pytests/integration/ssh/test_master.py
index 5a81fcb8d0e..8b566756c55 100644
--- a/tests/pytests/integration/ssh/test_master.py
+++ b/tests/pytests/integration/ssh/test_master.py
@@ -2,16 +2,49 @@
 Simple Smoke Tests for Connected SSH minions
 """
 
+import subprocess
+
+import packaging.version
 import pytest
 from saltfactories.utils.functional import StateResult
 
+import salt.utils.platform
+import salt.utils.versions
+
 pytestmark = [
     pytest.mark.slow_test,
     pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
 ]
 
 
+def _check_systemctl():
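+    # Memoize on the function attribute so the systemctl probe runs only once.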
+    if not hasattr(_check_systemctl, "memo"):
+        if not salt.utils.platform.is_linux():
+            _check_systemctl.memo = False
+        else:
+            proc = subprocess.run(["systemctl"], capture_output=True, check=False)
+            _check_systemctl.memo = (
+                b"Failed to get D-Bus connection: No such file or directory"
+                in proc.stderr
+            )
+    return _check_systemctl.memo
+
+
+def _check_python():
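+    # True means skip: /usr/bin/python3 is missing or its version is 3.10 or older.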
+    try:
+        proc = subprocess.run(
+            ["/usr/bin/python3", "--version"], capture_output=True, check=False
+        )
+    except FileNotFoundError:
+        return True
+    return packaging.version.Version(
+        proc.stdout.decode().strip().split()[1]
+    ) <= packaging.version.Version("3.10")
+
+
 @pytest.mark.skip_if_not_root
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
+@pytest.mark.skipif(_check_python(), reason="System Python is 3.10 or older")
 def test_service(salt_ssh_cli, grains):
     service = "cron"
     os_family = grains["os_family"]
diff --git a/tests/pytests/integration/states/test_file.py b/tests/pytests/integration/states/test_file.py
index 555f2a6116c..23dcd645ba8 100644
--- a/tests/pytests/integration/states/test_file.py
+++ b/tests/pytests/integration/states/test_file.py
@@ -400,7 +400,7 @@ def _check_min_patch_version(shell):
     min_patch_ver = "2.6"
     ret = shell.run("patch", "--version")
     assert ret.returncode == 0
-    version = ret.stdout.strip().split()[2]
+    version = ret.stdout.splitlines()[0].split()[-1]
     if Version(version) < Version(min_patch_ver):
         pytest.xfail(
             "Minimum version of patch not found, expecting {}, found {}".format(
@@ -1290,6 +1290,7 @@ def test_directory_recurse(salt_master, salt_call_cli, tmp_path, grains):
 
     target_file = target_dir / "test-file"
     target_file.write_text("this is a test file")
+    file_perms = target_file.stat().st_mode
 
     target_link = target_dir / "test-link"
     target_link.symlink_to(target_file)
@@ -1298,7 +1299,6 @@ def test_directory_recurse(salt_master, salt_call_cli, tmp_path, grains):
     ret = subprocess.run(["chown", "-h", "nobody", str(target_link)], check=False)
     assert ret.returncode == 0
 
-    file_perms = stat.S_IFREG | stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP
     if grains["os"] != "VMware Photon OS":
         file_perms |= stat.S_IROTH
 
diff --git a/tests/pytests/pkg/conftest.py b/tests/pytests/pkg/conftest.py
index 59d02c3a60d..f26c4a08065 100644
--- a/tests/pytests/pkg/conftest.py
+++ b/tests/pytests/pkg/conftest.py
@@ -34,6 +34,16 @@ def _system_up_to_date(
     grains,
     shell,
 ):
+    gpg_dest = "/etc/apt/keyrings/salt-archive-keyring.gpg"
+    if os.path.exists(gpg_dest):
+        with salt.utils.files.fopen(gpg_dest, "r") as fp:
+            log.error("Salt gpg key is %s", fp.read())
+    else:
+        log.error("Salt gpg not present")
+    # download_file(
+    #    "https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public",
+    #    gpg_dest,
+    # )
     if grains["os_family"] == "Debian":
         ret = shell.run("apt", "update")
         assert ret.returncode == 0
@@ -90,12 +100,6 @@ def pytest_addoption(parser):
         action="store_true",
         help="Do not uninstall salt packages after test run is complete",
     )
-    test_selection_group.addoption(
-        "--classic",
-        default=False,
-        action="store_true",
-        help="Test an upgrade from the classic packages.",
-    )
     test_selection_group.addoption(
         "--prev-version",
         action="store",
@@ -231,7 +235,6 @@ def install_salt(request, salt_factories_root_dir):
         downgrade=request.config.getoption("--downgrade"),
         no_uninstall=request.config.getoption("--no-uninstall"),
         no_install=request.config.getoption("--no-install"),
-        classic=request.config.getoption("--classic"),
         prev_version=request.config.getoption("--prev-version"),
         use_prev_version=request.config.getoption("--use-prev-version"),
     ) as fixture:
@@ -357,18 +360,7 @@ def salt_master(salt_factories, install_salt, pkg_tests_account):
 
     master_script = False
     if platform.is_windows():
-        if install_salt.classic:
-            master_script = True
-        if install_salt.relenv:
-            master_script = True
-        elif not install_salt.upgrade:
-            master_script = True
-        if (
-            not install_salt.relenv
-            and install_salt.use_prev_version
-            and not install_salt.classic
-        ):
-            master_script = False
+        master_script = True
 
     if master_script:
         salt_factories.system_service = False
@@ -376,11 +368,7 @@ def salt_master(salt_factories, install_salt, pkg_tests_account):
         scripts_dir = salt_factories.root_dir / "Scripts"
         scripts_dir.mkdir(exist_ok=True)
         salt_factories.scripts_dir = scripts_dir
-        python_executable = install_salt.bin_dir / "Scripts" / "python.exe"
-        if install_salt.classic:
-            python_executable = install_salt.bin_dir / "python.exe"
-        if install_salt.relenv:
-            python_executable = install_salt.install_dir / "Scripts" / "python.exe"
+        python_executable = install_salt.install_dir / "Scripts" / "python.exe"
         salt_factories.python_executable = python_executable
         factory = salt_factories.salt_master_daemon(
             random_string("master-"),
@@ -391,10 +379,6 @@ def salt_master(salt_factories, install_salt, pkg_tests_account):
         )
         salt_factories.system_service = True
     else:
-
-        if install_salt.classic and platform.is_darwin():
-            os.environ["PATH"] += ":/opt/salt/bin"
-
         factory = salt_factories.salt_master_daemon(
             random_string("master-"),
             defaults=config_defaults,
@@ -465,12 +449,6 @@ def salt_minion(salt_factories, salt_master, install_salt):
         )
         config_overrides["winrepo_source_dir"] = r"salt://win/repo_ng"
 
-    if install_salt.classic and platform.is_windows():
-        salt_factories.python_executable = None
-
-    if install_salt.classic and platform.is_darwin():
-        os.environ["PATH"] += ":/opt/salt/bin"
-
     factory = salt_master.salt_minion_daemon(
         minion_id,
         overrides=config_overrides,
diff --git a/tests/pytests/pkg/downgrade/test_salt_downgrade.py b/tests/pytests/pkg/downgrade/test_salt_downgrade.py
index adba7c51272..70f19e211b2 100644
--- a/tests/pytests/pkg/downgrade/test_salt_downgrade.py
+++ b/tests/pytests/pkg/downgrade/test_salt_downgrade.py
@@ -1,12 +1,54 @@
+import time
+
 import packaging.version
 import psutil
+import pytest
 from pytestskipmarkers.utils import platform
 
 
-def test_salt_downgrade(salt_call_cli, install_salt):
+def _get_running_named_salt_pid(process_name):
+
+    # We need to check the full command line for salt-minion, salt-master,
+    # etc., because the psutil process name is truncated. For example:
+    #
+    # Linux: psutil returns only the first part of the command, '/opt/saltstack/'
+    # Linux: ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager']
+    #
+    # macOS: psutil returns only the last part of the command, i.e. 'python3.10'
+    # macOS: ['/opt/salt/bin/python3.10 /opt/salt/salt-minion', '']
+
+    pids = []
+    for proc in psutil.process_iter():
+        cmd_line = ""
+        try:
+            cmd_line = " ".join(str(element) for element in proc.cmdline())
+        except (psutil.ZombieProcess, psutil.NoSuchProcess):
+            # The process ended or is a zombie; its command line is not
+            # available, so cmd_line stays empty and it will not match.
+            pass
+        if process_name in cmd_line:
+            try:
+                pids.append(proc.pid)
+            except psutil.NoSuchProcess:
+                # Process is now closed
+                continue
+
+    return pids
+
+
+def test_salt_downgrade_minion(salt_call_cli, install_salt):
     """
-    Test an upgrade of Salt.
+    Test a downgrade of Salt Minion.
     """
+    # salt-minion restart fixes landed in 3006.9 (see PR 66218); skip when
+    # downgrading to an older version on the affected platforms.
+    restart_fix_missing = packaging.version.parse(
+        install_salt.prev_version
+    ) < packaging.version.parse("3006.9")
+
+    if restart_fix_missing and install_salt.distro_id in ("ubuntu", "debian", "darwin"):
+        pytest.skip(
+            "Skip package test, since downgrade version is less than "
+            "3006.9 which had fixes for salt-minion restarting, see PR 66218"
+        )
+
     is_downgrade_to_relenv = packaging.version.parse(
         install_salt.prev_version
     ) >= packaging.version.parse("3006.0")
@@ -15,7 +57,7 @@ def test_salt_downgrade(salt_call_cli, install_salt):
         original_py_version = install_salt.package_python_version()
 
     # Verify current install version is setup correctly and works
-    ret = salt_call_cli.run("test.version")
+    ret = salt_call_cli.run("--local", "test.version")
     assert ret.returncode == 0
     assert packaging.version.parse(ret.data) == packaging.version.parse(
         install_salt.artifact_version
@@ -38,42 +80,34 @@ def test_salt_downgrade(salt_call_cli, install_salt):
     else:
         process_name = "salt-minion"
 
-    old_pid = []
-
-    # psutil process name only returning first part of the command '/opt/saltstack/'
-    # need to check all of command line for salt-minion
-    # ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager']
-    # and psutil is only returning the salt-minion once
-    for proc in psutil.process_iter():
-        if salt_name in proc.name():
-            cmdl_strg = " ".join(str(element) for element in proc.cmdline())
-            if process_name in cmdl_strg:
-                old_pid.append(proc.pid)
-
-    assert old_pid
+    old_minion_pids = _get_running_named_salt_pid(process_name)
+    assert old_minion_pids
 
     # Downgrade Salt to the previous version and test
     install_salt.install(downgrade=True)
+
+    time.sleep(10)  # give the downgrade some time to settle
+    # A downgrade install stops services on Debian/Ubuntu (they start their
+    # services after an install, while RedHat systems do not). Restart them
+    # so the tests start with the config settings we have set.
+    if install_salt.distro_id in ("ubuntu", "debian"):
+        install_salt.restart_services()
+
+    time.sleep(60)  # give the services time to come back up
+
+    # Verify there is a new running minion by getting its PID and comparing it
+    # with the PID from before the upgrade
+    new_minion_pids = _get_running_named_salt_pid(process_name)
+    assert new_minion_pids
+    assert new_minion_pids != old_minion_pids
+
     bin_file = "salt"
     if platform.is_windows():
         if not is_downgrade_to_relenv:
             bin_file = install_salt.install_dir / "salt-call.bat"
         else:
             bin_file = install_salt.install_dir / "salt-call.exe"
-    elif platform.is_darwin() and install_salt.classic:
-        bin_file = install_salt.bin_dir / "salt-call"
-
-    # Verify there is a new running minion by getting its PID and comparing it
-    # with the PID from before the upgrade
-    new_pid = []
-    for proc in psutil.process_iter():
-        if salt_name in proc.name():
-            cmdl_strg = " ".join(str(element) for element in proc.cmdline())
-            if process_name in cmdl_strg:
-                new_pid.append(proc.pid)
-
-    assert new_pid
-    assert new_pid != old_pid
 
     ret = install_salt.proc.run(bin_file, "--version")
     assert ret.returncode == 0
@@ -81,7 +115,7 @@ def test_salt_downgrade(salt_call_cli, install_salt):
         ret.stdout.strip().split()[1]
     ) < packaging.version.parse(install_salt.artifact_version)
 
-    if is_downgrade_to_relenv:
+    if is_downgrade_to_relenv and not platform.is_darwin():
         new_py_version = install_salt.package_python_version()
         if new_py_version == original_py_version:
             # test pip install after a downgrade
diff --git a/tests/pytests/pkg/download/test_pkg_download.py b/tests/pytests/pkg/download/test_pkg_download.py
index 9a0fbd76bad..d331d527e6f 100644
--- a/tests/pytests/pkg/download/test_pkg_download.py
+++ b/tests/pytests/pkg/download/test_pkg_download.py
@@ -7,8 +7,10 @@ import logging
 import os
 import pathlib
 import shutil
+import time
 
 import packaging.version
+import psutil
 import pytest
 from pytestskipmarkers.utils import platform
 
@@ -53,33 +55,10 @@ def get_salt_test_commands():
 
 @pytest.fixture(scope="module")
 def root_url(salt_release):
-    if os.environ.get("SALT_REPO_TYPE", "release") == "staging":
-        repo_domain = os.environ.get(
-            "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io"
-        )
-    else:
-        repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io")
-    if "rc" in salt_release:
-        salt_path = "salt_rc/salt"
-    else:
-        salt_path = "salt"
-    salt_repo_user = os.environ.get("SALT_REPO_USER")
-    if salt_repo_user:
-        log.info(
-            "SALT_REPO_USER: %s",
-            salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1],
-        )
-    salt_repo_pass = os.environ.get("SALT_REPO_PASS")
-    if salt_repo_pass:
-        log.info(
-            "SALT_REPO_PASS: %s",
-            salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1],
-        )
-    if salt_repo_user and salt_repo_pass:
-        repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}"
-    _root_url = f"https://{repo_domain}/{salt_path}/py3"
-    log.info("Repository Root URL: %s", _root_url)
-    return _root_url
+    default_url = "https://packages.broadcom.com/artifactory"
+    root_url = os.environ.get("SALT_REPO_DOMAIN_RELEASE", default_url)
+    log.info("Repository Root URL: %s", root_url)
+    return root_url
 
 
 @pytest.fixture(scope="module")
@@ -93,46 +72,15 @@ def get_salt_release():
     if salt_release is None:
         if pkg_test_type == "download-pkgs":
             log.warning(
-                "Setting salt release to 3006.0rc2 which is probably not what you want."
+                "Setting salt release to 3006.0 which is probably not what you want."
             )
-        salt_release = "3006.0rc2"
+        salt_release = "3006.0"
     if pkg_test_type == "download-pkgs":
-        if packaging.version.parse(salt_release) < packaging.version.parse("3006.0rc1"):
+        if packaging.version.parse(salt_release) < packaging.version.parse("3006.0"):
             log.warning("The salt release being tested, %r looks off.", salt_release)
     return salt_release
 
 
-def get_repo_subpath_params():
-    current_release = packaging.version.parse(get_salt_release())
-    params = ["minor", current_release.major]
-    latest_env_var = os.environ.get("LATEST_SALT_RELEASE")
-    if latest_env_var is not None:
-        latest_release = packaging.version.parse(latest_env_var)
-        if current_release >= latest_release:
-            log.debug(
-                "Running the tests for the latest release since %s >= %s",
-                current_release,
-                latest_release,
-            )
-            params.append("latest")
-    return params
-
-
-@pytest.fixture(
-    scope="module",
-    params=get_repo_subpath_params(),
-)
-def repo_subpath(request):
-    return request.param
-
-
-@pytest.fixture(scope="module")
-def gpg_key_name(salt_release):
-    if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
-        return "SALT-PROJECT-GPG-PUBKEY-2023.pub"
-    return "salt-archive-keyring.gpg"
-
-
 @pytest.fixture(scope="module")
 def salt_release():
     yield get_salt_release()
@@ -151,8 +99,6 @@ def _setup_system(
     shell,
     root_url,
     salt_release,
-    gpg_key_name,
-    repo_subpath,
     package_type,
     tmp_path_factory,
     onedir_install_path,
@@ -166,7 +112,6 @@ def _setup_system(
                 root_url=root_url,
                 salt_release=salt_release,
                 downloads_path=downloads_path,
-                repo_subpath=repo_subpath,
                 package_type=package_type,
                 onedir_install_path=onedir_install_path,
             ):
@@ -178,7 +123,6 @@ def _setup_system(
                     root_url=root_url,
                     salt_release=salt_release,
                     downloads_path=downloads_path,
-                    repo_subpath=repo_subpath,
                     package_type=package_type,
                     onedir_install_path=onedir_install_path,
                 )
@@ -190,8 +134,6 @@ def _setup_system(
                     root_url=root_url,
                     salt_release=salt_release,
                     downloads_path=downloads_path,
-                    gpg_key_name=gpg_key_name,
-                    repo_subpath=repo_subpath,
                 )
             elif grains["os"] == "Fedora":
                 setup_redhat_family(
@@ -201,8 +143,6 @@ def _setup_system(
                     root_url=root_url,
                     salt_release=salt_release,
                     downloads_path=downloads_path,
-                    gpg_key_name=gpg_key_name,
-                    repo_subpath=repo_subpath,
                 )
             elif grains["os"] == "VMware Photon OS":
                 setup_redhat_family(
@@ -212,8 +152,6 @@ def _setup_system(
                     root_url=root_url,
                     salt_release=salt_release,
                     downloads_path=downloads_path,
-                    gpg_key_name=gpg_key_name,
-                    repo_subpath=repo_subpath,
                 )
             elif grains["os_family"] == "RedHat":
                 setup_redhat_family(
@@ -223,20 +161,14 @@ def _setup_system(
                     root_url=root_url,
                     salt_release=salt_release,
                     downloads_path=downloads_path,
-                    gpg_key_name=gpg_key_name,
-                    repo_subpath=repo_subpath,
                 )
             elif grains["os_family"] == "Debian":
                 setup_debian_family(
                     shell,
-                    os_name=grains["os"].lower(),
-                    os_version=grains["osrelease"],
                     os_codename=grains["oscodename"],
                     root_url=root_url,
                     salt_release=salt_release,
                     downloads_path=downloads_path,
-                    gpg_key_name=gpg_key_name,
-                    repo_subpath=repo_subpath,
                     package_type=package_type,
                     onedir_install_path=onedir_install_path,
                 )
@@ -254,34 +186,19 @@ def setup_redhat_family(
     root_url,
     salt_release,
     downloads_path,
-    gpg_key_name,
-    repo_subpath,
 ):
     arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
 
     if os_name == "photon":
         os_version = f"{os_version}.0"
 
-    if repo_subpath == "minor":
-        repo_url_base = (
-            f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
-        )
-    else:
-        repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
-
-    gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"
-
-    try:
-        pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
-    except Exception as exc:  # pylint: disable=broad-except
-        pytest.fail(f"Failed to download {gpg_file_url}: {exc}")
-
-    ret = shell.run("rpm", "--import", str(downloads_path / gpg_key_name), check=False)
-    if ret.returncode != 0:
-        pytest.fail("Failed to import gpg key")
+    repo_url_base = f"{root_url}/saltproject-rpm"
 
+    # Download salt.repo; it contains the GPG key URL, so the key does not
+    # need to be downloaded separately here
+    salt_repo_url = "https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo"
     repo_file = pytest.helpers.download_file(
-        f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo"
+        salt_repo_url, downloads_path / "salt.repo"
     )
 
     commands = [
@@ -291,13 +208,13 @@ def setup_redhat_family(
             "yum",
             "install",
             "-y",
-            "salt-master",
-            "salt-minion",
-            "salt-ssh",
-            "salt-syndic",
-            "salt-cloud",
-            "salt-api",
-            "salt-debuginfo",
+            f"salt-master-{salt_release}",
+            f"salt-minion-{salt_release}",
+            f"salt-ssh-{salt_release}",
+            f"salt-syndic-{salt_release}",
+            f"salt-cloud-{salt_release}",
+            f"salt-api-{salt_release}",
+            f"salt-debuginfo-{salt_release}",
         ),
     ]
 
@@ -309,14 +226,10 @@ def setup_redhat_family(
 
 def setup_debian_family(
     shell,
-    os_name,
-    os_version,
     os_codename,
     root_url,
     salt_release,
     downloads_path,
-    gpg_key_name,
-    repo_subpath,
     package_type,
     onedir_install_path,
 ):
@@ -331,11 +244,9 @@ def setup_debian_family(
         elif arch == "x86_64":
             arch = "amd64"
 
-        if repo_subpath == "minor":
-            repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
-        else:
-            repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
-        gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"
+        repo_url_base = f"{root_url}/saltproject-deb/"
+        gpg_file_url = "https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public"
+        gpg_key_name = "SALT-PROJECT-GPG-PUBKEY-2023.pub"
 
         try:
             pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
@@ -380,11 +291,9 @@ def setup_debian_family(
     else:
         # We are testing the onedir download
         onedir_name = f"salt-{salt_release}-onedir-linux-{arch}.tar.xz"
-        if repo_subpath == "minor":
-            repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
-        else:
-            repo_url_base = f"{root_url}/onedir/{repo_subpath}"
-        onedir_url = f"{repo_url_base}/{onedir_name}"
+        onedir_url = (
+            f"{root_url}/saltproject-generic/onedir/{salt_release}/{onedir_name}"
+        )
         onedir_location = downloads_path / onedir_name
         onedir_extracted = onedir_install_path
 
@@ -401,24 +310,13 @@ def setup_macos(
     root_url,
     salt_release,
     downloads_path,
-    repo_subpath,
     package_type,
     onedir_install_path,
 ):
     arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
     if package_type == "package":
-
-        if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
-            mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg"
-            if repo_subpath == "minor":
-                mac_pkg_url = (
-                    f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}"
-                )
-            else:
-                mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}"
-        else:
-            mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}"
-            mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg"
+        mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg"
+        mac_pkg_url = f"{root_url}/saltproject-generic/macos/{salt_release}/{mac_pkg}"
 
         mac_pkg_path = downloads_path / mac_pkg
         pytest.helpers.download_file(mac_pkg_url, mac_pkg_path)
@@ -435,11 +333,7 @@ def setup_macos(
     else:
         # We are testing the onedir download
         onedir_name = f"salt-{salt_release}-onedir-macos-{arch}.tar.xz"
-        if repo_subpath == "minor":
-            repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
-        else:
-            repo_url_base = f"{root_url}/onedir/{repo_subpath}"
-        onedir_url = f"{repo_url_base}/{onedir_name}"
+        onedir_url = f"{root_url}/onedir/{salt_release}/{onedir_name}"
         onedir_location = downloads_path / onedir_name
         onedir_extracted = onedir_install_path
 
@@ -457,40 +351,71 @@ def setup_windows(
     root_url,
     salt_release,
     downloads_path,
-    repo_subpath,
     package_type,
     onedir_install_path,
+    timeout=300,
 ):
     try:
         arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
         if package_type != "onedir":
             root_dir = pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt")
 
-            if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
-                if package_type.lower() == "nsis":
-                    if arch.lower() != "x86":
-                        arch = arch.upper()
-                    win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe"
-                else:
-                    if arch.lower() != "x86":
-                        arch = arch.upper()
-                    win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi"
-                if repo_subpath == "minor":
-                    win_pkg_url = (
-                        f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}"
-                    )
-                else:
-                    win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}"
-                ssm_bin = root_dir / "ssm.exe"
+            if package_type.lower() == "nsis":
+                if arch.lower() != "x86":
+                    arch = arch.upper()
+                win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe"
             else:
-                win_pkg = f"salt-{salt_release}-windows-{arch}.exe"
-                win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}"
-                ssm_bin = root_dir / "bin" / "ssm_bin"
+                if arch.lower() != "x86":
+                    arch = arch.upper()
+                win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi"
+            win_pkg_url = (
+                f"{root_url}/saltproject-generic/windows/{salt_release}/{win_pkg}"
+            )
+            ssm_bin = root_dir / "ssm.exe"
 
             pkg_path = downloads_path / win_pkg
 
             pytest.helpers.download_file(win_pkg_url, pkg_path)
             if package_type.lower() == "nsis":
+                # Make sure no installer/uninstaller processes are running.
+                # Uninst.exe launches a second binary (Un.exe or Un_*.exe),
+                # so find the name of any such process that is still alive
+                processes = [
+                    win_pkg,
+                    "uninst.exe",
+                    "Un.exe",
+                    "Un_A.exe",
+                    "Un_B.exe",
+                    "Un_C.exe",
+                    "Un_D.exe",
+                    "Un_D.exe",
+                    "Un_F.exe",
+                    "Un_G.exe",
+                ]
+                proc_name = ""
+                for proc in processes:
+                    try:
+                        if proc in (p.name() for p in psutil.process_iter()):
+                            proc_name = proc
+                    except psutil.NoSuchProcess:
+                        continue
+
+                # Give the process time to exit, timing out after ``timeout``
+                # seconds (300, i.e. 5 minutes, by default)
+                if proc_name:
+                    elapsed_time = 0
+                    while elapsed_time < timeout:
+                        try:
+                            if proc_name not in (
+                                p.name() for p in psutil.process_iter()
+                            ):
+                                break
+                        except psutil.NoSuchProcess:
+                            continue
+                        elapsed_time += 0.1
+                        time.sleep(0.1)
+
+                # Only run setup when we're sure no other installations are running
                 ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False)
             else:
                 ret = shell.run(
@@ -512,11 +437,7 @@ def setup_windows(
         else:
             # We are testing the onedir download
             onedir_name = f"salt-{salt_release}-onedir-windows-{arch}.zip"
-            if repo_subpath == "minor":
-                repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
-            else:
-                repo_url_base = f"{root_url}/onedir/{repo_subpath}"
-            onedir_url = f"{repo_url_base}/{onedir_name}"
+            onedir_url = f"{root_url}/onedir/{salt_release}/{onedir_name}"
             onedir_location = downloads_path / onedir_name
             onedir_extracted = onedir_install_path
 
@@ -556,6 +477,7 @@ def salt_test_command(request, install_dir):
     return command
 
 
+@pytest.mark.skip_on_windows(reason="This is flaky on Windows")
 @pytest.mark.parametrize("salt_test_command", get_salt_test_commands(), indirect=True)
 def test_download(shell, salt_test_command):
     """
diff --git a/tests/pytests/pkg/integration/test_clean_zmq_teardown.py b/tests/pytests/pkg/integration/test_clean_zmq_teardown.py
index 309493e69aa..d1dbe325ab2 100644
--- a/tests/pytests/pkg/integration/test_clean_zmq_teardown.py
+++ b/tests/pytests/pkg/integration/test_clean_zmq_teardown.py
@@ -12,12 +12,6 @@ pytestmark = [
 log = logging.getLogger(__name__)
 
 
-@pytest.fixture(autouse=True)
-def _skip_on_non_relenv(install_salt):
-    if not install_salt.relenv:
-        pytest.skip("This test is for relenv versions of salt")
-
-
 def test_check_no_import_error(salt_call_cli, salt_master):
     """
     Test that we don't have any errors on teardown of python when using a py-rendered sls file
diff --git a/tests/pytests/pkg/integration/test_enabled_disabled.py b/tests/pytests/pkg/integration/test_enabled_disabled.py
index 99097b187ee..4cfa5d2adc1 100644
--- a/tests/pytests/pkg/integration/test_enabled_disabled.py
+++ b/tests/pytests/pkg/integration/test_enabled_disabled.py
@@ -2,8 +2,8 @@ import pytest
 from pytestskipmarkers.utils import platform
 
 
-@pytest.mark.skip_on_windows(reason="Linux test only")
-def test_services(install_salt, salt_cli, salt_minion):
+@pytest.mark.skip_unless_on_linux(reason="Linux test only")
+def test_services(install_salt, salt_call_cli):
     """
     Check if Services are enabled/disabled
     """
@@ -29,9 +29,15 @@ def test_services(install_salt, salt_cli, salt_minion):
         pytest.fail(f"Don't know how to handle os_family={install_salt.distro_id}")
 
     for service in services_enabled:
-        ret = salt_cli.run("service.enabled", service, minion_tgt=salt_minion.id)
-        assert "true" in ret.stdout
+        test_cmd = f"systemctl show -p UnitFileState {service}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        test_enabled = ret.stdout.strip().split("=")[1].split('"')[0].strip()
+        assert ret.returncode == 0
+        assert test_enabled == "enabled"
 
     for service in services_disabled:
-        ret = salt_cli.run("service.disabled", service, minion_tgt=salt_minion.id)
-        assert "true" in ret.stdout
+        test_cmd = f"systemctl show -p UnitFileState {service}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        test_enabled = ret.stdout.strip().split("=")[1].split('"')[0].strip()
+        assert ret.returncode == 0
+        assert test_enabled == "disabled"
diff --git a/tests/pytests/pkg/integration/test_pip.py b/tests/pytests/pkg/integration/test_pip.py
index 12d34bbffea..6fdb2965018 100644
--- a/tests/pytests/pkg/integration/test_pip.py
+++ b/tests/pytests/pkg/integration/test_pip.py
@@ -75,8 +75,6 @@ def test_pip_install_extras(shell, install_salt, extras_pypath_bin):
     """
     Test salt-pip installs into the correct directory
     """
-    if not install_salt.relenv:
-        pytest.skip("The extras directory is only in relenv versions")
     dep = "pep8"
     extras_keyword = "extras-3"
     if platform.is_windows():
@@ -126,11 +124,7 @@ def test_pip_non_root(
     pypath,
     pkg_tests_account_environ,
 ):
-    if install_salt.classic:
-        pytest.skip("We can install non-root for classic packages")
     check_path = extras_pypath_bin / "pep8"
-    if not install_salt.relenv and not install_salt.classic:
-        check_path = pypath / "pep8"
     # We should be able to issue a --help without being root
     ret = subprocess.run(
         install_salt.binary_paths["salt"] + ["--help"],
@@ -180,8 +174,6 @@ def test_pip_install_salt_extension_in_extras(install_salt, extras_pypath, shell
     Test salt-pip installs into the correct directory and the salt extension
     is properly loaded.
     """
-    if not install_salt.relenv:
-        pytest.skip("The extras directory is only in relenv versions")
     dep = "salt-analytics-framework"
     dep_version = "0.1.0"
 
diff --git a/tests/pytests/pkg/integration/test_python.py b/tests/pytests/pkg/integration/test_python.py
index 9b16cea3796..77d2a82a16c 100644
--- a/tests/pytests/pkg/integration/test_python.py
+++ b/tests/pytests/pkg/integration/test_python.py
@@ -6,9 +6,6 @@ import pytest
 
 @pytest.fixture
 def python_script_bin(install_salt):
-    # Tiamat builds run scripts via `salt python`
-    if not install_salt.relenv and not install_salt.classic:
-        return install_salt.binary_paths["python"][:1] + ["python"]
     return install_salt.binary_paths["python"]
 
 
diff --git a/tests/pytests/pkg/integration/test_salt_api.py b/tests/pytests/pkg/integration/test_salt_api.py
index 3ba7b74b62a..b13775bd794 100644
--- a/tests/pytests/pkg/integration/test_salt_api.py
+++ b/tests/pytests/pkg/integration/test_salt_api.py
@@ -5,10 +5,17 @@ pytestmark = [
 ]
 
 
-def test_salt_api(api_request):
+def test_salt_api(api_request, install_salt, salt_master):
     """
     Test running a command against the salt api
     """
+    if install_salt.distro_id in ("ubuntu", "debian"):
+        pytest.skip(
+            "Package test are getting reworked in https://github.com/saltstack/salt/issues/66672"
+        )
+
+    assert salt_master.is_running()
+
     ret = api_request.post(
         "/run",
         data={
diff --git a/tests/pytests/pkg/integration/test_salt_call.py b/tests/pytests/pkg/integration/test_salt_call.py
index 69f434a2c40..c16ecb67481 100644
--- a/tests/pytests/pkg/integration/test_salt_call.py
+++ b/tests/pytests/pkg/integration/test_salt_call.py
@@ -13,10 +13,12 @@ def test_salt_call_local(salt_call_cli):
     assert ret.data is True
 
 
-def test_salt_call(salt_call_cli):
+def test_salt_call(salt_call_cli, salt_master):
     """
     Test salt-call test.ping
     """
+    assert salt_master.is_running()
+
     ret = salt_call_cli.run("test.ping")
     assert ret.returncode == 0
     assert ret.data is True
@@ -44,10 +46,12 @@ def state_name(salt_master):
         yield name
 
 
-def test_sls(salt_call_cli, state_name):
+def test_sls(salt_call_cli, salt_master, state_name):
     """
     Test calling a sls file
     """
+    assert salt_master.is_running()
+
     ret = salt_call_cli.run("state.apply", state_name)
     assert ret.returncode == 0
     assert ret.data
diff --git a/tests/pytests/pkg/integration/test_salt_grains.py b/tests/pytests/pkg/integration/test_salt_grains.py
index 422179350b9..071dcf1525e 100644
--- a/tests/pytests/pkg/integration/test_salt_grains.py
+++ b/tests/pytests/pkg/integration/test_salt_grains.py
@@ -6,37 +6,45 @@ pytestmark = [
 ]
 
 
-def test_grains_items(salt_cli, salt_minion):
+def test_grains_items(salt_cli, salt_minion, salt_master):
     """
     Test grains.items
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run("grains.items", minion_tgt=salt_minion.id)
     assert ret.data, ret
     assert "osrelease" in ret.data
 
 
-def test_grains_item_os(salt_cli, salt_minion):
+def test_grains_item_os(salt_cli, salt_minion, salt_master):
     """
     Test grains.item os
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run("grains.item", "os", minion_tgt=salt_minion.id)
     assert ret.data, ret
     assert "os" in ret.data
 
 
-def test_grains_item_pythonversion(salt_cli, salt_minion):
+def test_grains_item_pythonversion(salt_cli, salt_minion, salt_master):
     """
     Test grains.item pythonversion
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run("grains.item", "pythonversion", minion_tgt=salt_minion.id)
     assert ret.data, ret
     assert "pythonversion" in ret.data
 
 
-def test_grains_setval_key_val(salt_cli, salt_minion):
+def test_grains_setval_key_val(salt_cli, salt_minion, salt_master):
     """
     Test grains.setval key val
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run("grains.setval", "key", "val", minion_tgt=salt_minion.id)
     assert ret.data, ret
     assert "key" in ret.data
diff --git a/tests/pytests/pkg/integration/test_salt_minion.py b/tests/pytests/pkg/integration/test_salt_minion.py
index b62de8d841e..1a06db1b1f3 100644
--- a/tests/pytests/pkg/integration/test_salt_minion.py
+++ b/tests/pytests/pkg/integration/test_salt_minion.py
@@ -5,20 +5,24 @@ pytestmark = [
 ]
 
 
-def test_salt_minion_ping(salt_cli, salt_minion):
+def test_salt_minion_ping(salt_cli, salt_minion, salt_master):
     """
     Test running a command against a targeted minion
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id)
     assert ret.returncode == 0
     assert ret.data is True
 
 
-def test_salt_minion_setproctitle(salt_cli, salt_minion):
+def test_salt_minion_setproctitle(salt_cli, salt_minion, salt_master):
     """
     Test that setproctitle is working
     for the running Salt minion
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run(
         "ps.pgrep", "MinionProcessManager", full=True, minion_tgt=salt_minion.id
     )
diff --git a/tests/pytests/pkg/integration/test_salt_output.py b/tests/pytests/pkg/integration/test_salt_output.py
index e05cf457ded..b4d61044846 100644
--- a/tests/pytests/pkg/integration/test_salt_output.py
+++ b/tests/pytests/pkg/integration/test_salt_output.py
@@ -6,10 +6,12 @@ pytestmark = [
 
 
 @pytest.mark.parametrize("output_fmt", ["yaml", "json"])
-def test_salt_output(salt_cli, salt_minion, output_fmt):
+def test_salt_output(salt_cli, salt_minion, salt_master, output_fmt):
     """
     Test --output
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run(
         f"--output={output_fmt}", "test.fib", "7", minion_tgt=salt_minion.id
     )
diff --git a/tests/pytests/pkg/integration/test_salt_pillar.py b/tests/pytests/pkg/integration/test_salt_pillar.py
index f6cacf14b3c..7e1f98a3542 100644
--- a/tests/pytests/pkg/integration/test_salt_pillar.py
+++ b/tests/pytests/pkg/integration/test_salt_pillar.py
@@ -35,10 +35,12 @@ def pillar_name(salt_master):
         yield name
 
 
-def test_salt_pillar(salt_cli, salt_minion, pillar_name):
+def test_salt_pillar(salt_cli, salt_minion, salt_master, pillar_name):
     """
     Test pillar.items
     """
+    assert salt_master.is_running()
+
     ret = salt_cli.run("pillar.items", minion_tgt=salt_minion.id)
     assert ret.returncode == 0
     assert pillar_name in ret.data
diff --git a/tests/pytests/pkg/integration/test_salt_state_file.py b/tests/pytests/pkg/integration/test_salt_state_file.py
index 1aadf3dbddb..0c4804654cb 100644
--- a/tests/pytests/pkg/integration/test_salt_state_file.py
+++ b/tests/pytests/pkg/integration/test_salt_state_file.py
@@ -52,13 +52,14 @@ def state_name(files, salt_master):
         yield name
 
 
-def test_salt_state_file(salt_cli, salt_minion, state_name, files):
+def test_salt_state_file(salt_cli, salt_minion, salt_master, state_name, files):
     """
     Test state file
     """
     assert files.fpath_1.exists() is False
     assert files.fpath_2.exists() is False
     assert files.fpath_3.exists() is False
+    assert salt_master.is_running()
 
     ret = salt_cli.run("state.apply", state_name, minion_tgt=salt_minion.id)
     assert ret.returncode == 0
diff --git a/tests/pytests/pkg/integration/test_salt_ufw.py b/tests/pytests/pkg/integration/test_salt_ufw.py
index 2164de85c57..6c86e0a3339 100644
--- a/tests/pytests/pkg/integration/test_salt_ufw.py
+++ b/tests/pytests/pkg/integration/test_salt_ufw.py
@@ -2,18 +2,45 @@ import pathlib
 
 import pytest
 
+pytestmark = [
+    pytest.mark.skip_unless_on_linux,
+]
+
+
+@pytest.fixture
+def salt_systemd_setup(
+    install_salt,
+    salt_call_cli,
+):
+    """
+    Fixture to set the systemd units for the Salt packages to enabled and active.
+    Note: assumes the Salt packages are already installed.
+    """
+    install_salt.install()
+
+    # ensure known state, enabled and active
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl enable {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+        test_cmd = f"systemctl restart {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
 
-@pytest.mark.skip_on_windows
 @pytest.mark.skip_if_binaries_missing("ufw")
-def test_salt_ufw(salt_master, salt_call_cli, install_salt):
+def test_salt_ufw(salt_systemd_setup, install_salt, salt_call_cli):
     """
     Test salt.ufw for Debian/Ubuntu salt-master
     """
     if install_salt.distro_id not in ("debian", "ubuntu"):
         pytest.skip("Only tests Debian / Ubuntu packages")
 
-    # check that the salt_master is running
-    assert salt_master.is_running()
+    # setup systemd to enabled and active for Salt packages
+    # pylint: disable=pointless-statement
+    salt_systemd_setup
 
     ufw_master_path = pathlib.Path("/etc/ufw/applications.d/salt.ufw")
     assert ufw_master_path.exists()
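
The `salt_systemd_setup` fixture introduced here reappears nearly verbatim in the other test modules below; one possible consolidation (a sketch only, not part of this change) would be a single fixture in the package-level conftest.py:

```python
# Hypothetical shared fixture for a package-level conftest.py, sketching how
# the duplicated salt_systemd_setup fixtures could be consolidated.
import pytest

SALT_SERVICES = ("salt-api", "salt-minion", "salt-master")


@pytest.fixture
def salt_systemd_setup(install_salt, salt_call_cli):
    """Ensure the Salt units are enabled and freshly restarted."""
    install_salt.install()
    for service in SALT_SERVICES:
        for action in ("enable", "restart"):
            ret = salt_call_cli.run(
                "--local", "cmd.run", f"systemctl {action} {service}"
            )
            assert ret.returncode == 0
```
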
diff --git a/tests/pytests/pkg/integration/test_salt_user.py b/tests/pytests/pkg/integration/test_salt_user.py
index 834fd399121..3978bfe9ca7 100644
--- a/tests/pytests/pkg/integration/test_salt_user.py
+++ b/tests/pytests/pkg/integration/test_salt_user.py
@@ -2,6 +2,7 @@ import os
 import pathlib
 import subprocess
 import sys
+import time
 
 import packaging.version
 import psutil
@@ -9,18 +10,33 @@ import pytest
 from saltfactories.utils.tempfiles import temp_directory
 
 pytestmark = [
-    pytest.mark.skip_on_windows,
-    pytest.mark.skip_on_darwin,
-    pytest.mark.skipif(
-        True,
-        reason=(
-            "Package permissions are getting reworked in "
-            "https://github.com/saltstack/salt/pull/66218"
-        ),
-    ),
+    pytest.mark.skip_unless_on_linux,
 ]
 
 
+@pytest.fixture
+def salt_systemd_setup(
+    install_salt,
+    salt_call_cli,
+):
+    """
+    Fixture to set the systemd units for the Salt packages to enabled and active.
+    Note: assumes the Salt packages are already installed.
+    """
+    install_salt.install()
+
+    # ensure known state, enabled and active
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl enable {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+        test_cmd = f"systemctl restart {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+
 @pytest.fixture
 def pkg_paths():
     """
@@ -52,8 +68,12 @@ def pkg_paths_salt_user():
         "/var/log/salt/master",
         "/var/log/salt/api",
         "/var/log/salt/key",
+        "/var/log/salt/syndic",
         "/var/cache/salt/master",
         "/var/run/salt/master",
+        "/run/salt-master.pid",
+        "/run/salt-syndic.pid",
+        "/run/salt-api.pid",
     ]
 
 
@@ -68,16 +88,18 @@ def pkg_paths_salt_user_exclusions():
     return paths
 
 
-@pytest.fixture(autouse=True)
-def _skip_on_non_relenv(install_salt):
-    if not install_salt.relenv:
-        pytest.skip("The salt user only exists on relenv versions of salt")
-
-
-def test_salt_user_master(salt_master, install_salt):
+def test_salt_user_master(install_salt, salt_master):
     """
     Test the correct user is running the Salt Master
     """
+    for _ in range(30):
+        if salt_master.is_running():
+            break
+        time.sleep(2)
+
+    assert salt_master.is_running()
+
     match = False
     for proc in psutil.Process(salt_master.pid).children():
         assert proc.username() == "salt"
@@ -86,10 +108,12 @@ def test_salt_user_master(salt_master, install_salt):
     assert match
 
 
-def test_salt_user_home(install_salt):
+def test_salt_user_home(install_salt, salt_master):
     """
     Test the salt user's home is /opt/saltstack/salt
     """
+    assert salt_master.is_running()
+
     proc = subprocess.run(
         ["getent", "passwd", "salt"], check=False, capture_output=True
     )
@@ -102,10 +126,12 @@ def test_salt_user_home(install_salt):
     assert home == "/opt/saltstack/salt"
 
 
-def test_salt_user_group(install_salt):
+def test_salt_user_group(install_salt, salt_master):
     """
     Test the salt user is in the salt group
     """
+    assert salt_master.is_running()
+
     proc = subprocess.run(["id", "salt"], check=False, capture_output=True)
     assert proc.returncode == 0
     in_group = False
@@ -118,10 +144,12 @@ def test_salt_user_group(install_salt):
     assert in_group is True
 
 
-def test_salt_user_shell(install_salt):
+def test_salt_user_shell(install_salt, salt_master):
     """
     Test the salt user's login shell
     """
+    assert salt_master.is_running()
+
     proc = subprocess.run(
         ["getent", "passwd", "salt"], check=False, capture_output=True
     )
@@ -137,7 +165,11 @@ def test_salt_user_shell(install_salt):
 
 
 def test_pkg_paths(
-    install_salt, pkg_paths, pkg_paths_salt_user, pkg_paths_salt_user_exclusions
+    install_salt,
+    pkg_paths,
+    pkg_paths_salt_user,
+    pkg_paths_salt_user_exclusions,
+    salt_call_cli,
 ):
     """
     Test package paths ownership
@@ -146,12 +178,15 @@ def test_pkg_paths(
         "3006.4"
     ):
         pytest.skip("Package path ownership was changed in salt 3006.4")
+
     salt_user_subdirs = []
+
     for _path in pkg_paths:
         pkg_path = pathlib.Path(_path)
         assert pkg_path.exists()
         for dirpath, sub_dirs, files in os.walk(pkg_path):
             path = pathlib.Path(dirpath)
+
             # Directories owned by salt:salt or their subdirs/files
             if (
                 str(path) in pkg_paths_salt_user or str(path) in salt_user_subdirs
@@ -171,6 +206,8 @@ def test_pkg_paths(
                 assert path.owner() == "root"
                 assert path.group() == "root"
                 for file in files:
+                    if file.endswith("ipc"):
+                        continue
                     file_path = path.joinpath(file)
                     # Individual files owned by salt user
                     if str(file_path) in pkg_paths_salt_user:
@@ -182,7 +219,11 @@ def test_pkg_paths(
 
 @pytest.mark.skip_if_binaries_missing("logrotate")
 def test_paths_log_rotation(
-    salt_master, salt_minion, salt_call_cli, install_salt, pkg_tests_account
+    install_salt,
+    salt_master,
+    salt_minion,
+    salt_call_cli,
+    pkg_tests_account,
 ):
     """
     Test the correct ownership is assigned when log rotation occurs
@@ -207,8 +248,6 @@ def test_paths_log_rotation(
             "Only tests RedHat family packages till logrotation paths are resolved on Ubuntu/Debian, see issue 65231"
         )
 
-    # check that the salt_master is running
-    assert salt_master.is_running()
     match = False
     for proc in psutil.Process(salt_master.pid).children():
         assert proc.username() == "salt"
@@ -375,3 +414,7 @@ def test_paths_log_rotation(
 
                                 bkup_count += 1
                                 assert ret.returncode == 0
+
+    # ensure salt_master is left running for subsequent tests
+    salt_master.start()
+    assert salt_master.is_running() is True
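
Several tests above now wait for the master with a fixed 30 x 2-second polling loop; the same idea expressed as a small generic helper (a sketch, with hypothetical names):

```python
import time


def wait_until(predicate, tries=30, delay=2):
    """Call predicate() up to `tries` times, sleeping `delay` seconds
    between attempts; return True as soon as it returns truthy."""
    for _ in range(tries):
        if predicate():
            return True
        time.sleep(delay)
    return False


# Illustrative usage matching the polling loops above:
# assert wait_until(salt_master.is_running)
```
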
diff --git a/tests/pytests/pkg/integration/test_systemd_config.py b/tests/pytests/pkg/integration/test_systemd_config.py
index 828e4413ad7..5f705eb2ee9 100644
--- a/tests/pytests/pkg/integration/test_systemd_config.py
+++ b/tests/pytests/pkg/integration/test_systemd_config.py
@@ -3,7 +3,7 @@ import subprocess
 import pytest
 
 pytestmark = [
-    pytest.mark.skip_on_windows(reason="Linux test only"),
+    pytest.mark.skip_unless_on_linux,
 ]
 
 
diff --git a/tests/pytests/pkg/integration/test_version.py b/tests/pytests/pkg/integration/test_version.py
index 521e23973c2..56a5dc33707 100644
--- a/tests/pytests/pkg/integration/test_version.py
+++ b/tests/pytests/pkg/integration/test_version.py
@@ -1,6 +1,7 @@
 import os.path
 import pathlib
 import subprocess
+import time
 
 import pytest
 from pytestskipmarkers.utils import platform
@@ -11,20 +12,35 @@ def test_salt_version(version, install_salt):
     """
     Test version output from salt --version
     """
+    actual = []
     test_bin = os.path.join(*install_salt.binary_paths["salt"])
     ret = install_salt.proc.run(test_bin, "--version")
-    actual = ret.stdout.strip().split(" ")[:2]
+    if "+" in version:
+        # testing a non-release build artifact version
+        actual = ret.stdout.strip().split(" ")[:2]
+    else:
+        # testing against release build version, for example: downgrade
+        actual_ver = ret.stdout.strip().split(" ")[:2]
+        actual_ver_salt = actual_ver[1]  # get salt version
+        if "+" in actual_ver_salt:
+            actual_ver_salt_stripped = actual_ver_salt.split("+")[
+                0
+            ]  # strip any git versioning
+            actual.append(actual_ver[0])
+            actual.append(actual_ver_salt_stripped)
+        else:
+            pytest.skip("We don't run this test on release builds")
+
     expected = ["salt", version]
     assert actual == expected
 
 
 @pytest.mark.skip_on_windows
+@pytest.mark.skip_on_darwin
 def test_salt_versions_report_master(install_salt):
     """
     Test running --versions-report on master
     """
-    if not install_salt.relenv and not install_salt.classic:
-        pytest.skip("Unable to get the python version dynamically from tiamat builds")
     test_bin = os.path.join(*install_salt.binary_paths["master"])
     python_bin = os.path.join(*install_salt.binary_paths["python"])
     ret = install_salt.proc.run(test_bin, "--versions-report")
@@ -39,37 +55,60 @@ def test_salt_versions_report_master(install_salt):
 
 
 @pytest.mark.skip_on_windows
-def test_salt_versions_report_minion(salt_cli, salt_minion):
+def test_salt_versions_report_minion(salt_cli, salt_call_cli, salt_master, salt_minion):
     """
     Test running test.versions_report on minion
     """
     # Make sure the minion is running
+    for _ in range(30):
+        if salt_minion.is_running():
+            break
+        time.sleep(2)
+
     assert salt_minion.is_running()
+
+    # Make sure the master is running
+    for _ in range(30):
+        if salt_master.is_running():
+            break
+        time.sleep(2)
+
+    assert salt_master.is_running()
+
     # Make sure we can ping the minion ...
     ret = salt_cli.run(
-        "--timeout=240", "test.ping", minion_tgt=salt_minion.id, _timeout=240
+        "--timeout=600", "test.ping", minion_tgt=salt_minion.id, _timeout=600
     )
+
     assert ret.returncode == 0
     assert ret.data is True
     ret = salt_cli.run(
         "--hard-crash",
         "--failhard",
-        "--timeout=240",
+        "--timeout=300",
         "test.versions_report",
         minion_tgt=salt_minion.id,
-        _timeout=240,
+        _timeout=300,
     )
     ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"])
 
 
+@pytest.mark.skip_on_windows
+@pytest.mark.skip_on_darwin
 @pytest.mark.parametrize(
     "binary", ["master", "cloud", "syndic", "minion", "call", "api"]
 )
-def test_compare_versions(version, binary, install_salt):
+def test_compare_versions(binary, install_salt):
     """
     Test compare versions
     """
+    version = install_salt.artifact_version
     if binary in install_salt.binary_paths:
+        if install_salt.upgrade:
+            install_salt.install()
+
         ret = install_salt.proc.run(
             *install_salt.binary_paths[binary],
             "--version",
@@ -89,7 +128,6 @@ def test_compare_versions(version, binary, install_salt):
 @pytest.mark.parametrize(
     "symlink",
     [
-        # We can't create a salt symlink because there is a salt directory
         "salt",
         "salt-api",
         "salt-call",
@@ -109,15 +147,13 @@ def test_symlinks_created(version, symlink, install_salt):
     """
     Test symlinks created
     """
-    if install_salt.classic:
-        pytest.skip("Symlinks not created for classic macos builds, we adjust the path")
-    if not install_salt.relenv and symlink == "spm":
-        symlink = "salt-spm"
     ret = install_salt.proc.run(pathlib.Path("/usr/local/sbin") / symlink, "--version")
+    install_log_file = pathlib.Path("/tmp") / "postinstall.txt"
+    install_log_content = install_log_file.read_text()
     ret.stdout.matcher.fnmatch_lines([f"*{version}*"])
 
 
-@pytest.mark.skip_on_windows
+@pytest.mark.skip_unless_on_linux
 @pytest.mark.skip_if_binaries_missing("rpmdev-vercmp")
 def test_compare_pkg_versions_redhat_rc(version, install_salt):
     """
diff --git a/tests/pytests/pkg/upgrade/test_salt_upgrade.py b/tests/pytests/pkg/upgrade/test_salt_upgrade.py
index fd883705c4a..5bce37d6aeb 100644
--- a/tests/pytests/pkg/upgrade/test_salt_upgrade.py
+++ b/tests/pytests/pkg/upgrade/test_salt_upgrade.py
@@ -1,38 +1,128 @@
-import logging
+import time
 
 import packaging.version
 import psutil
+import pytest
 from pytestskipmarkers.utils import platform
 
-log = logging.getLogger(__name__)
+pytestmark = [pytest.mark.skip_unless_on_linux(reason="Only supported on Linux family")]
 
 
-def _get_running_salt_minion_pid(process_name):
-    # psutil process name only returning first part of the command '/opt/saltstack/'
-    # need to check all of command line for salt-minion
-    # ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager']
-    # and psutil is only returning the salt-minion once
+@pytest.fixture
+def salt_systemd_setup(
+    salt_call_cli,
+    install_salt,
+):
+    """
+    Fixture to set the systemd units for the Salt packages to enabled and active.
+    Note: assumes the Salt packages are already installed.
+    """
+    # ensure known state, enabled and active
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl enable {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+        test_cmd = f"systemctl restart {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+
+@pytest.fixture
+def salt_test_upgrade(
+    salt_call_cli,
+    install_salt,
+):
+    """
+    Test upgrade of Salt packages for Minion and Master
+    """
+    # Verify previous install version salt-minion is setup correctly and works
+    ret = salt_call_cli.run("--local", "test.version")
+    assert ret.returncode == 0
+    installed_minion_version = packaging.version.parse(ret.data)
+    assert installed_minion_version < packaging.version.parse(
+        install_salt.artifact_version
+    )
+
+    # Verify previous install version salt-master is setup correctly and works
+    bin_file = "salt"
+    ret = install_salt.proc.run(bin_file, "--version")
+    assert ret.returncode == 0
+    assert packaging.version.parse(
+        ret.stdout.strip().split()[1]
+    ) < packaging.version.parse(install_salt.artifact_version)
+
+    # Verify there is a running minion and master by getting their PIDs
+    process_master_name = "salt-master"
+    if platform.is_windows():
+        process_minion_name = "salt-minion.exe"
+    else:
+        process_minion_name = "salt-minion"
+
+    old_minion_pids = _get_running_named_salt_pid(process_minion_name)
+    old_master_pids = _get_running_named_salt_pid(process_master_name)
+    assert old_minion_pids
+    assert old_master_pids
+
+    # Upgrade Salt (inc. minion, master, etc.) from previous version and test
+    install_salt.install(upgrade=True)
+
+    time.sleep(60)  # give it some time
+
+    ret = salt_call_cli.run("--local", "test.version")
+    assert ret.returncode == 0
+
+    installed_minion_version = packaging.version.parse(ret.data)
+    assert installed_minion_version == packaging.version.parse(
+        install_salt.artifact_version
+    )
+
+    ret = install_salt.proc.run(bin_file, "--version")
+    assert ret.returncode == 0
+    assert packaging.version.parse(
+        ret.stdout.strip().split()[1]
+    ) == packaging.version.parse(install_salt.artifact_version)
+
+    # Verify there is a new running minion and master by getting their PIDs
+    # and comparing them with the PIDs from before the upgrade
+
+    new_minion_pids = _get_running_named_salt_pid(process_minion_name)
+    new_master_pids = _get_running_named_salt_pid(process_master_name)
+
+    assert new_minion_pids
+    assert new_master_pids
+    assert new_minion_pids != old_minion_pids
+    assert new_master_pids != old_master_pids
+
+
+def _get_running_named_salt_pid(process_name):
+
+    # The full command line must be checked for the given name (for example,
+    # salt-minion or salt-master), because psutil's process name is unreliable:
+    #
+    # Linux: psutil returns only the first part of the command, e.g. '/opt/saltstack/...':
+    # ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager']
+    #
+    # macOS: psutil returns only the last part of the command '/opt/salt/bin/python3.10', i.e. 'python3.10':
+    # ['/opt/salt/bin/python3.10 /opt/salt/salt-minion', '']
+
     pids = []
     for proc in psutil.process_iter():
-        if "salt" in proc.name():
-            cmdl_strg = " ".join(str(element) for element in proc.cmdline())
-            if process_name in cmdl_strg:
-                pids.append(proc.pid)
+        cmdl_strg = " ".join(str(element) for element in proc.cmdline())
+        if process_name in cmdl_strg:
+            pids.append(proc.pid)
+
     return pids
 
 
-def test_salt_upgrade(salt_call_cli, install_salt):
+def test_salt_upgrade(salt_call_cli, install_salt, request):
     """
-    Test an upgrade of Salt.
+    Test an upgrade of Salt, including the Minion and Master
     """
-    if install_salt.relenv:
-        original_py_version = install_salt.package_python_version()
+    if not install_salt.upgrade:
+        pytest.skip("Not testing an upgrade, do not run")
 
-    # Verify previous install version is setup correctly and works
-    ret = salt_call_cli.run("--local", "test.version")
-    assert ret.returncode == 0
-    installed_version = packaging.version.parse(ret.data)
-    assert installed_version < packaging.version.parse(install_salt.artifact_version)
+    original_py_version = install_salt.package_python_version()
 
     # Test pip install before an upgrade
     dep = "PyGithub==1.56.0"
@@ -44,43 +134,12 @@ def test_salt_upgrade(salt_call_cli, install_salt):
     use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
     assert "Authentication information could" in use_lib.stderr
 
-    # Verify there is a running minion by getting its PID
-    if installed_version < packaging.version.parse("3006.0"):
-        # This is using PyInstaller
-        process_name = "run minion"
-    else:
-        if platform.is_windows():
-            process_name = "salt-minion.exe"
-        else:
-            process_name = "salt-minion"
-    old_pids = _get_running_salt_minion_pid(process_name)
-    assert old_pids
+    # perform the Salt package upgrade test; request the fixture here so the
+    # upgrade runs only after the pre-upgrade pip checks above
+    request.getfixturevalue("salt_test_upgrade")
 
-    # Upgrade Salt from previous version and test
-    install_salt.install(upgrade=True)
-    ret = salt_call_cli.run("--local", "test.version")
-    assert ret.returncode == 0
-    installed_version = packaging.version.parse(ret.data)
-    assert installed_version == packaging.version.parse(install_salt.artifact_version)
-
-    # Verify there is a new running minion by getting its PID and comparing it
-    # with the PID from before the upgrade
-    if installed_version < packaging.version.parse("3006.0"):
-        # This is using PyInstaller
-        process_name = "run minion"
-    else:
-        if platform.is_windows():
-            process_name = "salt-minion.exe"
-        else:
-            process_name = "salt-minion"
-    new_pids = _get_running_salt_minion_pid(process_name)
-
-    assert new_pids
-    assert new_pids != old_pids
-
-    if install_salt.relenv:
-        new_py_version = install_salt.package_python_version()
-        if new_py_version == original_py_version:
-            # test pip install after an upgrade
-            use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
-            assert "Authentication information could" in use_lib.stderr
+    new_py_version = install_salt.package_python_version()
+    if new_py_version == original_py_version:
+        # test pip install after an upgrade
+        use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
+        assert "Authentication information could" in use_lib.stderr
diff --git a/tests/pytests/pkg/upgrade/test_systemd_permissions.py b/tests/pytests/pkg/upgrade/test_systemd_permissions.py
new file mode 100644
index 00000000000..12afeb5f656
--- /dev/null
+++ b/tests/pytests/pkg/upgrade/test_systemd_permissions.py
@@ -0,0 +1,343 @@
+import time
+
+import pytest
+
+pytestmark = [
+    pytest.mark.skip_unless_on_linux(reason="Only supported on Linux family"),
+    pytest.mark.skipif(
+        True,
+        reason=(
+            "Package permissions are getting reworked in "
+            "https://github.com/saltstack/salt/pull/66218"
+        ),
+    ),
+]
+
+
+@pytest.fixture
+def salt_systemd_setup(
+    salt_call_cli,
+    install_salt,
+):
+    """
+    Fixture to set the systemd units for the Salt packages to enabled and active.
+    Note: assumes the Salt packages are already installed.
+    """
+    install_salt.install()
+
+    # ensure known state, enabled and active
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl enable {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+        test_cmd = f"systemctl restart {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+
+def test_salt_systemd_disabled_preservation(
+    salt_call_cli, install_salt, salt_systemd_setup
+):
+    """
+    Test upgrade of Salt packages preserve disabled state of systemd
+    """
+    if not install_salt.upgrade:
+        pytest.skip("Not testing an upgrade, do not run")
+
+    # setup systemd to enabled and active for Salt packages
+    # pylint: disable=pointless-statement
+    salt_systemd_setup
+
+    # ensure known state, disabled
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl disable {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+    # Upgrade Salt (inc. minion, master, etc.) from previous version and test
+    # pylint: disable=pointless-statement
+    install_salt.install(upgrade=True)
+    time.sleep(60)  # give it some time
+
+    # test for disabled systemd state
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl show -p UnitFileState {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        test_enabled = ret.stdout.strip().split("=")[1].split('"')[0].strip()
+        assert ret.returncode == 0
+        assert test_enabled == "disabled"
+
+
+def test_salt_systemd_enabled_preservation(
+    salt_call_cli, install_salt, salt_systemd_setup
+):
+    """
+    Test upgrade of Salt packages preserve enabled state of systemd
+    """
+    if not install_salt.upgrade:
+        pytest.skip("Not testing an upgrade, do not run")
+
+    # setup systemd to enabled and active for Salt packages
+    # pylint: disable=pointless-statement
+    salt_systemd_setup
+
+    # Upgrade Salt (inc. minion, master, etc.) from previous version and test
+    # pylint: disable=pointless-statement
+    install_salt.install(upgrade=True)
+    time.sleep(60)  # give it some time
+
+    # test for enabled systemd state
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl show -p UnitFileState {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        test_enabled = ret.stdout.strip().split("=")[1].split('"')[0].strip()
+        assert ret.returncode == 0
+        assert test_enabled == "enabled"
+
+
+def test_salt_systemd_inactive_preservation(
+    salt_call_cli, install_salt, salt_systemd_setup
+):
+    """
+    Test upgrade of Salt packages preserve inactive state of systemd
+    """
+    if not install_salt.upgrade:
+        pytest.skip("Not testing an upgrade, do not run")
+
+    # setup systemd to enabled and active for Salt packages
+    # pylint: disable=pointless-statement
+    salt_systemd_setup
+
+    # ensure known state, stopped (inactive)
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl stop {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        assert ret.returncode == 0
+
+    # Upgrade Salt (inc. minion, master, etc.) from previous version and test
+    # pylint: disable=pointless-statement
+    install_salt.install(upgrade=True)
+    time.sleep(60)  # give it some time
+
+    # test for inactive systemd state
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl is-active {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        test_active = ret.stdout.strip().split()[2].strip('"').strip()
+        assert ret.returncode == 1
+        assert test_active == "inactive"
+
+
+def test_salt_systemd_active_preservation(
+    salt_call_cli, install_salt, salt_systemd_setup
+):
+    """
+    Test upgrade of Salt packages preserve active state of systemd
+    """
+    if not install_salt.upgrade:
+        pytest.skip("Not testing an upgrade, do not run")
+
+    # setup systemd to enabled and active for Salt packages
+    # pylint: disable=pointless-statement
+    salt_systemd_setup
+
+    # Upgrade Salt (inc. minion, master, etc.) from previous version and test
+    # pylint: disable=pointless-statement
+    install_salt.install(upgrade=True)
+    time.sleep(60)  # give it some time
+
+    # test for active systemd state
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl is-active {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+        test_active = ret.stdout.strip().split()[2].strip('"').strip()
+        assert ret.returncode == 0
+        assert test_active == "active"
+
+
+def test_salt_ownership_permission(salt_call_cli, install_salt, salt_systemd_setup):
+    """
+    Test upgrade of Salt packages preserve existing ownership
+    """
+    if not install_salt.upgrade:
+        pytest.skip("Not testing an upgrade, do not run")
+
+    # setup systemd to enabled and active for Salt packages
+    # pylint: disable=pointless-statement
+    salt_systemd_setup
+
+    # test ownership for Minion, Master and Api
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        if "salt-api" == test_item:
+            test_cmd = f"ls -dl /run/{test_item}.pid"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_user = ret.stdout.strip().split()[4]
+            assert ret.returncode == 0
+            assert test_user == "salt"
+
+            test_cmd = f"ls -dl /run/{test_item}.pid"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_group = ret.stdout.strip().split()[5]
+            assert ret.returncode == 0
+            assert test_group == "salt"
+        else:
+            test_name = test_item.strip().split("-")[1]
+            test_cmd = f"ls -dl /run/salt/{test_name}"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_user = ret.stdout.strip().split()[4]
+            assert ret.returncode == 0
+            if test_item == "salt-minion":
+                assert test_user == "root"
+            else:
+                assert test_user == "salt"
+
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_group = ret.stdout.strip().split()[5]
+            assert ret.returncode == 0
+            if test_item == "salt-minion":
+                assert test_group == "root"
+            else:
+                assert test_group == "salt"
+
+    # create master and minion users, update the configs, restart, and test ownership
+    test_master_user = "horse"
+    test_minion_user = "donkey"
+    ret = salt_call_cli.run("--local", "user.list_users")
+    user_list = ret.stdout.strip().split(":")[1]
+
+    if test_master_user not in user_list:
+        ret = salt_call_cli.run("--local", "user.add", f"{test_master_user}")
+
+    if test_minion_user not in user_list:
+        ret = salt_call_cli.run("--local", "user.add", f"{test_minion_user}")
+
+    ret = salt_call_cli.run(
+        "--local", "file.comment_line", "/etc/salt/master", "^user:"
+    )
+    assert ret.returncode == 0
+
+    ret = salt_call_cli.run(
+        "--local", "file.comment_line", "/etc/salt/minion", "^user:"
+    )
+    assert ret.returncode == 0
+
+    test_string = f"\nuser: {test_master_user}\n"
+    ret = salt_call_cli.run("--local", "file.append", "/etc/salt/master", test_string)
+
+    test_string = f"\nuser: {test_minion_user}\n"
+    ret = salt_call_cli.run("--local", "file.append", "/etc/salt/minion", test_string)
+
+    # restart and check ownership is correct
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl restart {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+
+    time.sleep(10)  # allow some time for restart
+
+    # test ownership for Minion, Master and Api - horse and donkey
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        if "salt-api" == test_item:
+            test_cmd = f"ls -dl /run/{test_item}.pid"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_user = ret.stdout.strip().split()[4]
+            assert ret.returncode == 0
+            assert test_user == f"{test_master_user}"
+
+            test_cmd = f"ls -dl /run/{test_item}.pid"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_group = ret.stdout.strip().split()[5]
+            assert ret.returncode == 0
+            assert test_group == f"{test_master_user}"
+        else:
+            test_name = test_item.strip().split("-")[1]
+            test_cmd = f"ls -dl /run/salt/{test_name}"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_user = ret.stdout.strip().split()[4]
+            assert ret.returncode == 0
+            if test_item == "salt-minion":
+                assert test_user == f"{test_minion_user}"
+            else:
+                assert test_user == f"{test_master_user}"
+
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_group = ret.stdout.strip().split()[5]
+            assert ret.returncode == 0
+            if test_item == "salt-minion":
+                assert test_group == f"{test_minion_user}"
+            else:
+                assert test_group == f"{test_master_user}"
+
+    # Upgrade Salt (inc. minion, master, etc.) from previous version and test
+    # pylint: disable=pointless-statement
+    install_salt.install(upgrade=True)
+    time.sleep(60)  # give it some time
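+    # the upgrade should preserve the configured users, so ownership is checked against horse/donkey again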
+
+    # test ownership for Minion, Master and Api
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        if "salt-api" == test_item:
+            test_cmd = f"ls -dl /run/{test_item}.pid"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_user = ret.stdout.strip().split()[4]
+            assert ret.returncode == 0
+            assert test_user == f"{test_master_user}"
+
+            test_cmd = f"ls -dl /run/{test_item}.pid"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_group = ret.stdout.strip().split()[5]
+            assert ret.returncode == 0
+            assert test_group == f"{test_master_user}"
+        else:
+            test_name = test_item.strip().split("-")[1]
+            test_cmd = f"ls -dl /run/salt/{test_name}"
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_user = ret.stdout.strip().split()[4]
+            assert ret.returncode == 0
+            if test_item == "salt-minion":
+                assert test_user == f"{test_minion_user}"
+            else:
+                assert test_user == f"{test_master_user}"
+
+            ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+            test_group = ret.stdout.strip().split()[5]
+            assert ret.returncode == 0
+            if test_item == "salt-minion":
+                assert test_group == f"{test_minion_user}"
+            else:
+                assert test_group == f"{test_master_user}"
+
+    # restore to defaults to ensure further tests run fine
+    ret = salt_call_cli.run(
+        "--local", "file.comment_line", "/etc/salt/master", "^user:"
+    )
+    assert ret.returncode == 0
+
+    ret = salt_call_cli.run(
+        "--local", "file.comment_line", "/etc/salt/minion", "^user:"
+    )
+    assert ret.returncode == 0
+
+    test_string = "\nuser: salt\n"
+    ret = salt_call_cli.run("--local", "file.append", "/etc/salt/master", test_string)
+
+    test_string = "\nuser: root\n"
+    ret = salt_call_cli.run("--local", "file.append", "/etc/salt/minion", test_string)
+
+    # restart and check ownership is correct
+    test_list = ["salt-api", "salt-minion", "salt-master"]
+    for test_item in test_list:
+        test_cmd = f"systemctl restart {test_item}"
+        ret = salt_call_cli.run("--local", "cmd.run", test_cmd)
+
+    time.sleep(10)  # allow some time for restart
diff --git a/tests/pytests/scenarios/cluster/test_cluster.py b/tests/pytests/scenarios/cluster/test_cluster.py
index 5f81f30b09a..107f2566181 100644
--- a/tests/pytests/scenarios/cluster/test_cluster.py
+++ b/tests/pytests/scenarios/cluster/test_cluster.py
@@ -2,7 +2,7 @@
-Cluster scinarios.
+Cluster scenarios.
 """
 
-import os
+import getpass
 import pathlib
 import time
 
@@ -44,7 +44,7 @@ def test_cluster_key_rotation(
     assert not dfpath.exists()
     salt.crypt.dropfile(
         cluster_master_1.config["cachedir"],
-        user=os.getlogin(),
+        user=getpass.getuser(),
         master_id=cluster_master_1.config["id"],
     )
     assert dfpath.exists()
diff --git a/tests/pytests/scenarios/dns/multimaster/test_dns.py b/tests/pytests/scenarios/dns/multimaster/test_dns.py
index 5e0fc4c80f7..5712c21a896 100644
--- a/tests/pytests/scenarios/dns/multimaster/test_dns.py
+++ b/tests/pytests/scenarios/dns/multimaster/test_dns.py
@@ -39,7 +39,7 @@ def test_multimaster_dns(
             log.info("Removed secondary master IP address.")
-            # Wait for the minion's master_alive_interval, adding a second for
-            # reliablity.
-            time.sleep(master_alive_interval + 1)
+            # Wait for the minion's master_alive_interval, adding ten seconds
+            # for reliability.
+            time.sleep(master_alive_interval + 10)
             assert (
                 "Master ip address changed from 172.16.0.1 to 127.0.0.1" in caplog.text
             )
diff --git a/tests/pytests/scenarios/multimaster/conftest.py b/tests/pytests/scenarios/multimaster/conftest.py
index 84e7a9a3ceb..481a4a433ef 100644
--- a/tests/pytests/scenarios/multimaster/conftest.py
+++ b/tests/pytests/scenarios/multimaster/conftest.py
@@ -25,6 +25,12 @@ def salt_mm_master_1(request, salt_factories):
         "publish_signing_algorithm": (
             "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
         ),
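+        # verbose transport/channel/event logging to help diagnose multimaster test failures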
+        "log_granular_levels": {
+            "salt": "info",
+            "salt.transport": "debug",
+            "salt.channel": "debug",
+            "salt.utils.event": "debug",
+        },
     }
     factory = salt_factories.salt_master_daemon(
         "mm-master-1",
@@ -56,6 +62,12 @@ def salt_mm_master_2(salt_factories, salt_mm_master_1):
         "publish_signing_algorithm": (
             "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
         ),
+        "log_granular_levels": {
+            "salt": "info",
+            "salt.transport": "debug",
+            "salt.channel": "debug",
+            "salt.utils.event": "debug",
+        },
     }
 
     # Use the same ports for both masters, they are binding to different interfaces
@@ -106,6 +118,13 @@ def salt_mm_minion_1(salt_mm_master_1, salt_mm_master_2):
         "fips_mode": FIPS_TESTRUN,
         "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1",
         "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1",
+        "log_granular_levels": {
+            "salt": "info",
+            "salt.minion": "debug",
+            "salt.transport": "debug",
+            "salt.channel": "debug",
+            "salt.utils.event": "debug",
+        },
     }
     factory = salt_mm_master_1.salt_minion_daemon(
         "mm-minion-1",
@@ -136,6 +155,13 @@ def salt_mm_minion_2(salt_mm_master_1, salt_mm_master_2):
         "fips_mode": FIPS_TESTRUN,
         "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1",
         "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1",
+        "log_granular_levels": {
+            "salt": "info",
+            "salt.minion": "debug",
+            "salt.transport": "debug",
+            "salt.channel": "debug",
+            "salt.utils.event": "debug",
+        },
     }
     factory = salt_mm_master_2.salt_minion_daemon(
         "mm-minion-2",
diff --git a/tests/pytests/scenarios/performance/test_performance.py b/tests/pytests/scenarios/performance/test_performance.py
index 5c22b3fa2ee..1190dd3b230 100644
--- a/tests/pytests/scenarios/performance/test_performance.py
+++ b/tests/pytests/scenarios/performance/test_performance.py
@@ -94,6 +94,9 @@ def prev_master(
         container_run_kwargs={
             "network": docker_network_name,
             "hostname": prev_master_id,
+            "volumes": {
+                str(CODE_DIR): {"bind": "/salt", "mode": "z"},
+            },
         },
         start_timeout=120,
         max_start_attempts=3,
@@ -101,6 +104,7 @@ def prev_master(
         skip_on_pull_failure=True,
         skip_if_docker_client_not_connectable=True,
     )
+    factory.before_start(_install_salt_in_container, factory)
     with factory.started():
         yield factory
 
@@ -156,6 +160,9 @@ def prev_minion(
         container_run_kwargs={
             "network": docker_network_name,
             "hostname": prev_minion_id,
+            "volumes": {
+                str(CODE_DIR): {"bind": "/salt", "mode": "z"},
+            },
         },
         start_timeout=120,
         max_start_attempts=3,
@@ -167,6 +174,7 @@ def prev_minion(
     factory.after_terminate(
         pytest.helpers.remove_stale_minion_key, prev_master, factory.id
     )
+    factory.before_start(_install_salt_in_container, factory)
     with factory.started():
         yield factory
 
@@ -194,6 +202,16 @@ def _install_salt_in_container(container):
     else:
         requirements_py_version = ret.stdout.strip()
 
+    ret = container.run(
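+        # install Salt's pinned package requirements before installing Salt itself
+        # from the mounted /salt source tree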
+        "python3",
+        "-m",
+        "pip",
+        "install",
+        "-r",
+        f"/salt/requirements/static/pkg/py{requirements_py_version}/linux.txt",
+    )
+    log.debug("Install Salt package requirements in the container: %s", ret)
+    assert ret.returncode == 0, ret.stderr
     ret = container.run(
         "python3",
         "-m",
@@ -203,7 +221,7 @@ def _install_salt_in_container(container):
         "/salt",
     )
     log.debug("Install Salt in the container: %s", ret)
-    assert ret.returncode == 0
+    assert ret.returncode == 0, ret.stderr
 
 
 @pytest.fixture
diff --git a/tests/pytests/scenarios/transport/test_zeromq.py b/tests/pytests/scenarios/transport/test_zeromq.py
new file mode 100644
index 00000000000..25e6919c65c
--- /dev/null
+++ b/tests/pytests/scenarios/transport/test_zeromq.py
@@ -0,0 +1,82 @@
+import asyncio
+import logging
+import multiprocessing
+import time
+
+import pytest
+
+try:
+    import zmq
+
+    import salt.transport.zeromq
+    import salt.utils.process
+except ImportError:
+    zmq = None
+
+
+log = logging.getLogger(__name__)
+
+
+def clients(received):
+    """
+    Fire up 1000 subscriber (SUB) sockets and wait for a message on each.
+    """
+    log.debug("Clients start")
+    context = zmq.asyncio.Context()
+    sockets = {}
+    for i in range(1000):
+        socket = context.socket(zmq.SUB)
+        socket.connect("tcp://127.0.0.1:5406")
+        socket.setsockopt(zmq.SUBSCRIBE, b"")
+        sockets[i] = socket
+    log.debug("Clients connected")
+
+    async def check():
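+        # poll each subscriber for up to 60 seconds, closing and dropping
+        # sockets as their messages arrive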
+        start = time.time()
+        while time.time() - start < 60:
+            n = 0
+            for i in list(sockets):
+                if await sockets[i].poll():
+                    msg = await sockets[i].recv()
+                    n += 1
+                    log.debug(
+                        "Client %d got message %s total %d", i, msg, recieved.value
+                    )
+                    sockets[i].close(0)
+                    sockets.pop(i)
+            with received.get_lock():
+                received.value += n
+            await asyncio.sleep(0.3)
+
+    asyncio.run(check())
+
+
+@pytest.mark.skipif(not zmq, reason="Zeromq not installed")
+def test_issue_regression_65265():
+    """
+    Regression test for issue 65265. Prior to the fix this test failed only
+    intermittently; with the fix in place it passes reliably.
+    """
+    received = multiprocessing.Value("i", 0)
+    process_manager = salt.utils.process.ProcessManager(wait_for_kill=5)
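+    # zmq_backlog and pub_hwm are raised to accommodate the 1000 subscribers per client process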
+    opts = {"ipv6": False, "zmq_filtering": False, "zmq_backlog": 1000, "pub_hwm": 1000}
+    process_manager.add_process(clients, args=(received,))
+    process_manager.add_process(clients, args=(received,))
+    process_manager.add_process(clients, args=(received,))
+    # Give some time for all clients to start up before starting server.
+    time.sleep(10)
+    server = salt.transport.zeromq.PublishServer(
+        opts, pub_host="127.0.0.1", pub_port=5406, pull_path="/tmp/pull.ipc"
+    )
+    process_manager.add_process(server.publish_daemon, args=(server.publish_payload,))
+    # Wait some more for the server to start up completely.
+    time.sleep(10)
+    asyncio.run(server.publish(b"asdf"))
+    log.debug("After publish")
+    # Give time for clients to receive their messages.
+    time.sleep(10)
+    try:
+        with received.get_lock():
+            total = received.value
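+        # 3 client processes x 1000 subscriber sockets each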
+        assert total == 3000
+    finally:
+        process_manager.kill_children(9)
diff --git a/tests/pytests/unit/channel/test_client.py b/tests/pytests/unit/channel/test_client.py
new file mode 100644
index 00000000000..783657c4a45
--- /dev/null
+++ b/tests/pytests/unit/channel/test_client.py
@@ -0,0 +1,19 @@
+import salt.channel.client
+
+
+def test_async_methods():
+    "Validate all defined async_methods and close_methods are present"
+    async_classes = [
+        salt.channel.client.AsyncReqChannel,
+        salt.channel.client.AsyncPubChannel,
+    ]
+    method_attrs = [
+        "async_methods",
+        "close_methods",
+    ]
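+    # every name listed in async_methods/close_methods must exist on the class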
+    for cls in async_classes:
+        for attr in method_attrs:
+            assert hasattr(cls, attr)
+            assert isinstance(getattr(cls, attr), list)
+            for name in getattr(cls, attr):
+                assert hasattr(cls, name)
diff --git a/tests/pytests/unit/client/ssh/test_ssh.py b/tests/pytests/unit/client/ssh/test_ssh.py
index 9fc56165ff5..9e6e45b6024 100644
--- a/tests/pytests/unit/client/ssh/test_ssh.py
+++ b/tests/pytests/unit/client/ssh/test_ssh.py
@@ -341,7 +341,8 @@ def test_extra_filerefs(tmp_path, opts):
         assert ssh_obj.opts.get("extra_filerefs", None) == "salt://foobar"
 
 
-def test_key_deploy_permission_denied_scp(tmp_path, opts):
+@pytest.mark.parametrize("user_choice", ("y", "n"))
+def test_key_deploy_permission_denied_scp(tmp_path, opts, user_choice):
     """
     test "key_deploy" function when
     permission denied authentication error
@@ -375,19 +376,23 @@ def test_key_deploy_permission_denied_scp(tmp_path, opts):
     patch_roster_file = patch("salt.roster.get_roster_file", MagicMock(return_value=""))
     with patch_roster_file:
         client = ssh.SSH(opts)
-    patch_input = patch("builtins.input", side_effect=["y"])
+    patch_input = patch("builtins.input", side_effect=[user_choice])
     patch_getpass = patch("getpass.getpass", return_value=["password"])
     mock_key_run = MagicMock(return_value=key_run_ret)
     patch_key_run = patch("salt.client.ssh.SSH._key_deploy_run", mock_key_run)
     with patch_input, patch_getpass, patch_key_run:
         ret = client.key_deploy(host, ssh_ret)
-    assert mock_key_run.call_args_list[0][0] == (
-        host,
-        {"passwd": [passwd], "host": host, "user": usr},
-        True,
-    )
-    assert ret == key_run_ret
-    assert mock_key_run.call_count == 1
+    if user_choice == "y":
+        assert mock_key_run.call_args_list[0][0] == (
+            host,
+            {"passwd": [passwd], "host": host, "user": usr},
+            True,
+        )
+        assert ret == key_run_ret
+        assert mock_key_run.call_count == 1
+    else:
+        mock_key_run.assert_not_called()
+        assert ret == (ssh_ret, None)
 
 
 def test_key_deploy_permission_denied_file_scp(tmp_path, opts):
diff --git a/tests/pytests/unit/client/ssh/wrapper/test_cp.py b/tests/pytests/unit/client/ssh/wrapper/test_cp.py
index 77f8ebb0878..9ce4636056e 100644
--- a/tests/pytests/unit/client/ssh/wrapper/test_cp.py
+++ b/tests/pytests/unit/client/ssh/wrapper/test_cp.py
@@ -780,22 +780,19 @@ def test_get_url_non_salt_dest_empty_without_no_cache(client, cache_root, dest):
     Even when dest is None, but no_cache is False, the file should be sent
     to the minion cache.
     """
-    tgt = (
-        Path(cache_root) / "extrn_files" / "base" / "repo.saltproject.io" / "index.html"
-    )
+    tgt = Path(cache_root) / "extrn_files" / "base" / "saltproject.io" / "index.html"
     local_cache = (
         Path(cache_root)
         / "salt-ssh"
         / TGT
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "index.html"
     )
-    res = client.get_url("https://repo.saltproject.io/index.html", dest)
+    res = client.get_url("https://saltproject.io/index.html", dest)
     assert res
     assert local_cache.exists()
-    assert local_cache.read_text() == "hi there"
     assert res == str(local_cache)
     assert str(local_cache) in client.target_map
     assert client.target_map[str(local_cache)] == str(tgt)
@@ -811,10 +808,10 @@ def test_get_url_non_salt_dest_slash(client, cache_root, tmp_path):
         / TGT
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "foo.html"
     )
-    res = client.get_url("https://repo.saltproject.io/foo.html", str(tmp_path) + "/")
+    res = client.get_url("https://saltproject.io/foo.html", str(tmp_path) + "/")
     assert res
     assert local_cache.exists()
     assert local_cache.read_text() == "hi there"
@@ -835,10 +832,10 @@ def test_get_url_non_salt_dest_isdir(client, cache_root):
         / TGT
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "foo.html"
     )
-    res = client.get_url("https://repo.saltproject.io/foo.html", dest)
+    res = client.get_url("https://saltproject.io/foo.html", dest)
     assert res
     assert local_cache.exists()
     assert local_cache.read_text() == "hi there"
@@ -858,13 +855,12 @@ def test_get_url_non_salt_dest_name_override(client, cache_root):
         / TGT
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "foo.html"
     )
-    res = client.get_url("https://repo.saltproject.io/foo.html", dest)
+    res = client.get_url("https://saltproject.io/foo.html", dest)
     assert res
     assert local_cache.exists()
-    assert local_cache.read_text() == "hi there"
     assert res == str(local_cache)
     assert str(local_cache) in client.target_map
     assert client.target_map[str(local_cache)] == dest
@@ -879,14 +875,9 @@ def test_get_url_non_salt_dest_default_name(client, cache_root, tmp_path):
     # If you then try to cache any other file from that domain, it will
     # actually raise an exception because it attempts to create a dir with the same name
     local_cache = (
-        Path(cache_root)
-        / "salt-ssh"
-        / TGT
-        / "extrn_files"
-        / "base"
-        / "repo.saltproject.io"
+        Path(cache_root) / "salt-ssh" / TGT / "extrn_files" / "base" / "saltproject.io"
     )
-    res = client.get_url("https://repo.saltproject.io", str(tmp_path) + "/")
+    res = client.get_url("https://saltproject.io", str(tmp_path) + "/")
     assert res
     assert local_cache.exists()
     assert local_cache.read_text() == "hi there"
@@ -981,10 +972,10 @@ def test_get_template_dest_name_override(client, cache_root):
         / TGT
         / "extrn_files"
         / "base"
-        / "repo.saltproject.io"
+        / "saltproject.io"
         / "foo.html"
     )
-    res = client.get_url("https://repo.saltproject.io/foo.html", dest)
+    res = client.get_url("https://saltproject.io/foo.html", dest)
     assert res
     assert local_cache.exists()
     assert local_cache.read_text() == "hi there"
diff --git a/tests/pytests/unit/fileserver/test_roots.py b/tests/pytests/unit/fileserver/test_roots.py
index a197b937eec..124c491ce15 100644
--- a/tests/pytests/unit/fileserver/test_roots.py
+++ b/tests/pytests/unit/fileserver/test_roots.py
@@ -341,3 +341,13 @@ def test_serve_file_symlink_destination_not_in_root(tmp_state_tree):
     fnd = {"path": str(symlink / "testfile"), "rel": "bar/testfile"}
     ret = roots.serve_file(load, fnd)
     assert ret == {"data": b"testfile", "dest": "bar/testfile"}
+
+
+def test_relative_file_roots(tmp_state_tree):
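+    # point the base file root at a path relative to the cwd and verify lookups still resolve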
+    parent = pathlib.Path(tmp_state_tree).parent
+    reldir = os.path.basename(tmp_state_tree)
+    opts = {"file_roots": copy.copy(roots.__opts__["file_roots"])}
+    opts["file_roots"]["base"] = [reldir]
+    with patch.dict(roots.__opts__, opts), pytest.helpers.change_cwd(str(parent)):
+        ret = roots.find_file("testfile")
+        assert "testfile" == ret["rel"]
diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py
index 8b840738ef7..3015a2a30cb 100644
--- a/tests/pytests/unit/grains/test_core.py
+++ b/tests/pytests/unit/grains/test_core.py
@@ -1261,6 +1261,39 @@ def test_Parrot_OS_grains():
     _run_os_grains_tests(_os_release_data, _os_release_map, expectation)
 
 
+@pytest.mark.skip_unless_on_linux
+def test_manjaro_arm_grains():
+    """
+    Test if OS grains are parsed correctly in Manjaro ARM
+    """
+    # /etc/os-release data taken from Manjaro ARM 24.03
+    _os_release_data = {
+        "NAME": "Manjaro ARM",
+        "ID": "manjaro-arm",
+        "ID_LIKE": "manjaro arch",
+        "PRETTY_NAME": "Manjaro ARM",
+        "ANSI_COLOR": "1;32",
+        "HOME_URL": "https://www.manjaro.org/",
+        "SUPPORT_URL": "https://forum.manjaro.org/c/arm/",
+        "LOGO": "manjarolinux",
+    }
+    _os_release_map = {
+        "_linux_distribution": ("Manjaro ARM", "24.03", "n/a"),
+    }
+
+    expectation = {
+        "os": "Manjaro ARM",
+        "os_family": "Arch",
+        "oscodename": "Manjaro ARM",
+        "osfullname": "Manjaro ARM",
+        "osrelease": "24.03",
+        "osrelease_info": (24, 3),
+        "osmajorrelease": 24,
+        "osfinger": "Manjaro ARM-24",
+    }
+    _run_os_grains_tests(_os_release_data, _os_release_map, expectation)
+
+
 def test_unicode_error():
     raise_unicode_mock = MagicMock(name="raise_unicode_error", side_effect=UnicodeError)
     with patch("salt.grains.core.hostname"), patch(
@@ -3376,6 +3409,12 @@ def test_linux_gpus(caplog):
             "Vega [Radeon RX Vega]]",
             "amd",
         ],  # AMD
+        [
+            "Processing accelerators",
+            "Advanced Micro Devices, Inc. [AMD/ATI]",
+            "Device X",
+            "amd",
+        ],  # AMD
         [
             "Audio device",
             "Advanced Micro Devices, Inc. [AMD/ATI]",
@@ -4185,34 +4224,93 @@ def test__selinux():
         assert ret == {"enabled": True, "enforced": "Disabled"}
 
 
-def test__systemd():
+@pytest.mark.parametrize(
+    "systemd_data,expected",
+    (
+        (
+            {
+                "pid": 1234,
+                "retcode": 0,
+                "stdout": "systemd 254 (254.3-1)\n+PAM +AUDIT -SELINUX -APPARMOR -IMA +SMACK "
+                "+SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS "
+                "+FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 "
+                "-PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD "
+                "+BPF_FRAMEWORK +XKBCOMMON +UTMP -SYSVINIT default-hierarchy=unified",
+                "stderr": "",
+            },
+            {
+                "version": "254",
+                "features": "+PAM +AUDIT -SELINUX -APPARMOR -IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL "
+                "+ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP "
+                "+LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ "
+                "+ZLIB +ZSTD +BPF_FRAMEWORK +XKBCOMMON +UTMP -SYSVINIT default-hierarchy=unified",
+            },
+        ),
+        (
+            {
+                "pid": 2345,
+                "retcode": 1,
+                "stdout": "",
+                "stderr": "some garbage in the output",
+            },
+            {
+                "version": "UNDEFINED",
+                "features": "",
+            },
+        ),
+        (
+            {
+                "pid": 3456,
+                "retcode": 0,
+                "stdout": "unexpected stdout\none more line",
+                "stderr": "",
+            },
+            {
+                "version": "UNDEFINED",
+                "features": "",
+            },
+        ),
+        (
+            {
+                "pid": 4567,
+                "retcode": 0,
+                "stdout": "",
+                "stderr": "",
+            },
+            {
+                "version": "UNDEFINED",
+                "features": "",
+            },
+        ),
+        (
+            Exception("Some exception on calling `systemctl --version`"),
+            {
+                "version": "UNDEFINED",
+                "features": "",
+            },
+        ),
+    ),
+)
+def test__systemd(systemd_data, expected):
     """
     test _systemd
     """
+
+    def mock_run_all_systemd(_):
+        if isinstance(systemd_data, Exception):
+            raise systemd_data
+        return systemd_data
+
     with patch.dict(
         core.__salt__,
         {
-            "cmd.run": MagicMock(
-                return_value=(
-                    "systemd 254 (254.3-1)\n+PAM +AUDIT -SELINUX -APPARMOR -IMA +SMACK "
-                    "+SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS "
-                    "+FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 "
-                    "-PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD "
-                    "+BPF_FRAMEWORK +XKBCOMMON +UTMP -SYSVINIT default-hierarchy=unified"
-                )
-            ),
+            "cmd.run_all": mock_run_all_systemd,
         },
     ):
         ret = core._systemd()
         assert "version" in ret
         assert "features" in ret
-        assert ret["version"] == "254"
-        assert ret["features"] == (
-            "+PAM +AUDIT -SELINUX -APPARMOR -IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL "
-            "+ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP "
-            "+LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ "
-            "+ZLIB +ZSTD +BPF_FRAMEWORK +XKBCOMMON +UTMP -SYSVINIT default-hierarchy=unified"
-        )
+        assert ret == expected
 
 
 def test__clean_value_uuid(caplog):
diff --git a/tests/pytests/unit/grains/test_disks.py b/tests/pytests/unit/grains/test_disks.py
index a0d6d1030e7..b2a2d5fbd2f 100644
--- a/tests/pytests/unit/grains/test_disks.py
+++ b/tests/pytests/unit/grains/test_disks.py
@@ -2,8 +2,6 @@
     :codeauthor: :email:`Shane Lee <slee@saltstack.com>`
 """
 
-import textwrap
-
 import pytest
 
 import salt.grains.disks as disks
@@ -17,63 +15,58 @@ def configure_loader_modules():
     }
 
 
-def test__windows_disks():
+def test__windows_disks_dict():
     """
-    Test grains._windows_disks, normal return
-    Should return a populated dictionary
+    Test grains._windows_disks with a single disk returned as a dict
+    Should return 1 disk and no ssds
     """
-    mock_which = MagicMock(return_value="C:\\Windows\\System32\\wbem\\WMIC.exe")
-    wmic_result = textwrap.dedent(
-        """
-        DeviceId  MediaType
-        0         4
-        1         0
-        2         3
-        3         5
-    """
-    )
-    mock_run_all = MagicMock(return_value={"stdout": wmic_result, "retcode": 0})
+    devices = {"DeviceID": 0, "MediaType": "HDD"}
+    mock_powershell = MagicMock(return_value=devices)
 
-    with patch("salt.utils.path.which", mock_which), patch.dict(
-        disks.__salt__, {"cmd.run_all": mock_run_all}
-    ):
+    with patch.dict(disks.__salt__, {"cmd.powershell": mock_powershell}):
+        result = disks._windows_disks()
+        expected = {"disks": ["\\\\.\\PhysicalDrive0"], "ssds": []}
+        assert result == expected
+
+
+def test__windows_disks_list():
+    """
+    Test grains._windows_disks with multiple disks and types as a list of dicts
+    Should return 4 disks and 1 ssd
+    """
+    devices = [
+        {"DeviceID": 0, "MediaType": "SSD"},
+        {"DeviceID": 1, "MediaType": "HDD"},
+        {"DeviceID": 2, "MediaType": "HDD"},
+        {"DeviceID": 3, "MediaType": "HDD"},
+    ]
+    mock_powershell = MagicMock(return_value=devices)
+
+    with patch.dict(disks.__salt__, {"cmd.powershell": mock_powershell}):
         result = disks._windows_disks()
         expected = {
-            "ssds": ["\\\\.\\PhysicalDrive0"],
             "disks": [
                 "\\\\.\\PhysicalDrive0",
                 "\\\\.\\PhysicalDrive1",
                 "\\\\.\\PhysicalDrive2",
                 "\\\\.\\PhysicalDrive3",
             ],
+            "ssds": ["\\\\.\\PhysicalDrive0"],
         }
         assert result == expected
-        cmd = " ".join(
-            [
-                "C:\\Windows\\System32\\wbem\\WMIC.exe",
-                "/namespace:\\\\root\\microsoft\\windows\\storage",
-                "path",
-                "MSFT_PhysicalDisk",
-                "get",
-                "DeviceID,MediaType",
-                "/format:table",
-            ]
-        )
-        mock_run_all.assert_called_once_with(cmd)
 
 
-def test__windows_disks_retcode():
+def test__windows_disks_empty():
     """
-    Test grains._windows_disks, retcode 1
+    Test grains._windows_disks when nothing is returned
     Should return empty lists
     """
-    mock_which = MagicMock(return_value="C:\\Windows\\System32\\wbem\\WMIC.exe")
-    mock_run_all = MagicMock(return_value={"stdout": "", "retcode": 1})
-    with patch("salt.utils.path.which", mock_which), patch.dict(
-        disks.__salt__, {"cmd.run_all": mock_run_all}
-    ):
+    devices = {}
+    mock_powershell = MagicMock(return_value=devices)
+
+    with patch.dict(disks.__salt__, {"cmd.powershell": mock_powershell}):
+        expected = {"disks": [], "ssds": []}
         result = disks._windows_disks()
-        expected = {"ssds": [], "disks": []}
         assert result == expected
 
 
diff --git a/tests/pytests/unit/grains/test_opts.py b/tests/pytests/unit/grains/test_opts.py
new file mode 100644
index 00000000000..35fc8a06b20
--- /dev/null
+++ b/tests/pytests/unit/grains/test_opts.py
@@ -0,0 +1,20 @@
+"""
+tests.pytests.unit.grains.test_opts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+"""
+
+import salt.grains.opts as opts
+from tests.support.mock import patch
+
+
+def test_grain_opts_does_not_overwrite_core_grains(tmp_path):
+    """
+    Tests that enabling grain_opts doesn't overwrite the core grains
+
+    See: https://github.com/saltstack/salt/issues/66784
+    """
+    dunder_opts = {"grain_opts": True}
+
+    with patch.object(opts, "__opts__", dunder_opts, create=True):
+        with patch.object(opts, "__pillar__", {}, create=True):
+            assert opts.opts() == {"opts": dunder_opts}
diff --git a/tests/pytests/unit/grains/test_secure_boot.py b/tests/pytests/unit/grains/test_secure_boot.py
new file mode 100644
index 00000000000..f56f413beca
--- /dev/null
+++ b/tests/pytests/unit/grains/test_secure_boot.py
@@ -0,0 +1,129 @@
+"""
+    :codeauthor: :email:`David Murphy <david-dm.murphy@broadcom.com>`
+"""
+
+import shutil
+
+import pytest
+
+import salt.grains.extra
+from tests.support.mock import patch
+
+pytestmark = [
+    pytest.mark.skip_unless_on_linux(reason="Only supported on Linux family"),
+]
+
+
+@pytest.mark.parametrize(
+    "setting_secure, extra_file, expected_enabled",
+    (
+        (True, False, True),
+        (True, True, False),
+        (False, False, False),
+        (False, True, False),
+    ),
+)
+def test_secure_boot_efivars(tmp_path, setting_secure, extra_file, expected_enabled):
+    secure_boot_path = tmp_path / "secure-boot"
+    secure_boot_path_vars = secure_boot_path / "efivars"
+    secure_boot_path_vars.mkdir(parents=True, exist_ok=True)
+    secure_boot_filepath = secure_boot_path_vars / "SecureBoot-dog"
+
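+    # efivars layout: 4 attribute bytes followed by the variable data (0x01 = Secure Boot enabled)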
+    if setting_secure:
+        secure_boot_filepath.write_bytes(b"\x06\x00\x00\x00\x01")
+    else:
+        secure_boot_filepath.write_bytes(b"\x06\x00\x00\x00\x00")
+
+    if extra_file:
+        secure_boot_filepath2 = secure_boot_path_vars / "SecureBoot-kat"
+        if setting_secure:
+            secure_boot_filepath2.write_bytes(b"\x06\x00\x00\x00\x01")
+        else:
+            secure_boot_filepath2.write_bytes(b"\x06\x00\x00\x00\x00")
+
+    with patch(
+        "salt.grains.extra.get_secure_boot_path", return_value=secure_boot_path_vars
+    ):
+        grains = salt.grains.extra.uefi()
+        expected = {"efi": True, "efi-secure-boot": expected_enabled}
+        assert grains == expected
+
+    shutil.rmtree(secure_boot_path)
+
+
+@pytest.mark.parametrize(
+    "setting_secure, extra_file, expected_enabled",
+    (
+        (True, False, True),
+        (True, True, False),
+        (False, False, False),
+        (False, True, False),
+    ),
+)
+def test_secure_boot_vars(tmp_path, setting_secure, extra_file, expected_enabled):
+    secure_boot_path = tmp_path / "secure-boot"
+    secure_boot_path_vars = secure_boot_path / "vars" / "SecureBoot-dog"
+    secure_boot_path_vars1 = secure_boot_path_vars / "SecureBoot-dog"
+    secure_boot_path_vars1.mkdir(parents=True, exist_ok=True)
+    secure_boot_filepath = secure_boot_path_vars1 / "data"
+
+    if setting_secure:
+        secure_boot_filepath.write_bytes(b"\x06\x00\x00\x00\x01")
+    else:
+        secure_boot_filepath.write_bytes(b"\x06\x00\x00\x00\x00")
+
+    if extra_file:
+        secure_boot_path_vars2 = secure_boot_path_vars / "SecureBoot-kat"
+        secure_boot_path_vars2.mkdir(parents=True, exist_ok=True)
+        secure_boot_filepath2 = secure_boot_path_vars2 / "data"
+        if setting_secure:
+            secure_boot_filepath2.write_bytes(b"\x06\x00\x00\x00\x01")
+        else:
+            secure_boot_filepath2.write_bytes(b"\x06\x00\x00\x00\x00")
+
+    with patch(
+        "salt.grains.extra.get_secure_boot_path", return_value=secure_boot_path_vars
+    ):
+        grains = salt.grains.extra.uefi()
+        expected = {"efi": True, "efi-secure-boot": expected_enabled}
+        assert grains == expected
+
+    shutil.rmtree(secure_boot_path)
+
+
+@pytest.mark.parametrize(
+    "setting_secure, expected_enabled",
+    (
+        (True, True),
+        (False, False),
+    ),
+)
+def test_secure_boot_efivars_and_vars(tmp_path, setting_secure, expected_enabled):
+    secure_boot_path = tmp_path / "secure-boot"
+    secure_boot_path_vars = secure_boot_path / "efivars"
+    secure_boot_path_vars.mkdir(parents=True, exist_ok=True)
+    secure_boot_filepath = secure_boot_path_vars / "SecureBoot-dog"
+
+    secure_boot_path_vars2 = secure_boot_path / "vars" / "SecureBoot-kat"
+    secure_boot_path_vars2.mkdir(parents=True, exist_ok=True)
+    secure_boot_filepath2 = secure_boot_path_vars2 / "data"
+
+    if setting_secure:
+        # efivars True, vars / data False
+        secure_boot_filepath.write_bytes(b"\x06\x00\x00\x00\x01")
+        secure_boot_filepath2.write_bytes(b"\x06\x00\x00\x00\x00")
+    else:
+        # efivars False, vars / data True
+        secure_boot_filepath.write_bytes(b"\x06\x00\x00\x00\x00")
+        secure_boot_filepath2.write_bytes(b"\x06\x00\x00\x00\x01")
+
+    with patch(
+        "salt.grains.extra.get_secure_boot_path", return_value=secure_boot_path_vars
+    ):
+        grains = salt.grains.extra.uefi()
+        expected = {"efi": True, "efi-secure-boot": expected_enabled}
+        assert grains == expected
+
+    shutil.rmtree(secure_boot_path)
diff --git a/tests/pytests/unit/modules/file/test_file_block_replace.py b/tests/pytests/unit/modules/file/test_file_block_replace.py
index 8a05154f41c..8cc9b818b51 100644
--- a/tests/pytests/unit/modules/file/test_file_block_replace.py
+++ b/tests/pytests/unit/modules/file/test_file_block_replace.py
@@ -48,6 +48,7 @@ def configure_loader_modules():
             "__grains__": grains,
             "__utils__": {
                 "files.is_binary": MagicMock(return_value=False),
+                "files.is_text": salt.utils.files.is_text,
                 "files.get_encoding": MagicMock(return_value="utf-8"),
                 "stringutils.get_diff": salt.utils.stringutils.get_diff,
             },
@@ -546,3 +547,17 @@ def test_unfinished_block_exception(multiline_file):
             content="foobar",
             backup=False,
         )
+
+
+def test_search_proc_file():
+    """
+    Test that searching content in a /proc file does not raise a TypeError
+    and handles bytes correctly.
+    """
+    proc_file_path = "/proc/cpuinfo"
+
+    if not os.path.exists(proc_file_path):
+        pytest.skip(f"{proc_file_path} not available")
+
+    match_found = filemod.search(proc_file_path, "processor")
+    assert match_found, "Failed to find 'processor' in /proc/cpuinfo"
diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py
index f95bdcf63f6..e0a07d8bb26 100644
--- a/tests/pytests/unit/modules/test_aptpkg.py
+++ b/tests/pytests/unit/modules/test_aptpkg.py
@@ -450,7 +450,7 @@ def test_get_http_proxy_url_username_passwd():
     """
-    Test _get_http_proxy_url when username and passwod set
+    Test _get_http_proxy_url when username and password are set
     """
-    host = "repo.saltproject.io"
+    host = "packages.broadcom.com"
     port = "888"
     user = "user"
     passwd = "password"
@@ -466,7 +466,7 @@ def test_get_http_proxy_url():
     """
     Test basic functionality for _get_http_proxy_url
     """
-    host = "repo.saltproject.io"
+    host = "packages.broadcom.com"
     port = "888"
     user = ""
     passwd = ""
diff --git a/tests/pytests/unit/modules/test_cmdmod.py b/tests/pytests/unit/modules/test_cmdmod.py
index cfc031fc063..e1f2a604cd1 100644
--- a/tests/pytests/unit/modules/test_cmdmod.py
+++ b/tests/pytests/unit/modules/test_cmdmod.py
@@ -1059,7 +1059,14 @@ def test_prep_powershell_cmd_no_powershell():
         )
 
 
-def test_prep_powershell_cmd():
+@pytest.mark.parametrize(
+    "cmd, parsed",
+    [
+        ("Write-Host foo", "& Write-Host foo"),
+        ("$PSVersionTable", "$PSVersionTable"),
+    ],
+)
+def test_prep_powershell_cmd(cmd, parsed):
     """
     Tests _prep_powershell_cmd returns correct cmd
     """
@@ -1068,7 +1075,7 @@ def test_prep_powershell_cmd():
         "salt.utils.path.which", return_value="C:\\powershell.exe"
     ):
         ret = cmdmod._prep_powershell_cmd(
-            win_shell="powershell", cmd="$PSVersionTable", encoded_cmd=False
+            win_shell="powershell", cmd=cmd, encoded_cmd=False
         )
         expected = [
             "C:\\powershell.exe",
@@ -1077,7 +1084,7 @@ def test_prep_powershell_cmd():
             "-ExecutionPolicy",
             "Bypass",
             "-Command",
-            "& {$PSVersionTable}",
+            parsed,
         ]
         assert ret == expected
 
diff --git a/tests/pytests/unit/modules/test_ini_manage.py b/tests/pytests/unit/modules/test_ini_manage.py
index 499bae71e06..e226f34dfaa 100644
--- a/tests/pytests/unit/modules/test_ini_manage.py
+++ b/tests/pytests/unit/modules/test_ini_manage.py
@@ -94,24 +94,22 @@ def test_get_option(encoding, linesep, ini_file, ini_content):
     )
     ini_file.write_bytes(content)
 
-    assert (
-        ini.get_option(str(ini_file), "main", "test1", encoding=encoding) == "value 1"
-    )
-    assert (
-        ini.get_option(str(ini_file), "main", "test2", encoding=encoding) == "value 2"
-    )
-    assert (
-        ini.get_option(str(ini_file), "SectionB", "test1", encoding=encoding)
-        == "value 1B"
-    )
-    assert (
-        ini.get_option(str(ini_file), "SectionB", "test3", encoding=encoding)
-        == "value 3B"
-    )
-    assert (
-        ini.get_option(str(ini_file), "SectionC", "empty_option", encoding=encoding)
-        == ""
+    option = ini.get_option(str(ini_file), "main", "test1", encoding=encoding)
+    assert option == "value 1"
+
+    option = ini.get_option(str(ini_file), "main", "test2", encoding=encoding)
+    assert option == "value 2"
+
+    option = ini.get_option(str(ini_file), "SectionB", "test1", encoding=encoding)
+    assert option == "value 1B"
+
+    option = ini.get_option(str(ini_file), "SectionB", "test3", encoding=encoding)
+    assert option == "value 3B"
+
+    option = ini.get_option(
+        str(ini_file), "SectionC", "empty_option", encoding=encoding
     )
+    assert option == ""
 
 
 @pytest.mark.parametrize("linesep", ["\r", "\n", "\r\n"])
@@ -249,11 +247,12 @@ def test_set_option(encoding, linesep, ini_file, ini_content):
     )
 
 
+@pytest.mark.parametrize("no_spaces", [True, False])
 @pytest.mark.parametrize("linesep", ["\r", "\n", "\r\n"])
 @pytest.mark.parametrize(
     "encoding", [None, "cp1252" if sys.platform == "win32" else "ISO-2022-JP"]
 )
-def test_empty_value(encoding, linesep, ini_file, ini_content):
+def test_empty_value(encoding, linesep, no_spaces, ini_file, ini_content):
     """
     Test empty value preserved after edit
     """
@@ -263,19 +262,23 @@ def test_empty_value(encoding, linesep, ini_file, ini_content):
     ini_file.write_bytes(content)
 
     ini.set_option(
-        str(ini_file), {"SectionB": {"test3": "new value 3B"}}, encoding=encoding
+        str(ini_file),
+        {"SectionB": {"test3": "new value 3B"}},
+        encoding=encoding,
+        no_spaces=no_spaces,
     )
     with salt.utils.files.fopen(str(ini_file), "r") as fp_:
         file_content = salt.utils.stringutils.to_unicode(fp_.read(), encoding=encoding)
-    expected = "{0}{1}{0}".format(os.linesep, "empty_option = ")
+    expected = f"{os.linesep}empty_option{'=' if no_spaces else ' = '}{os.linesep}"
     assert expected in file_content, "empty_option was not preserved"
 
 
+@pytest.mark.parametrize("no_spaces", [True, False])
 @pytest.mark.parametrize("linesep", ["\r", "\n", "\r\n"])
 @pytest.mark.parametrize(
     "encoding", [None, "cp1252" if sys.platform == "win32" else "ISO-2022-JP"]
 )
-def test_empty_lines(encoding, linesep, ini_file, ini_content):
+def test_empty_lines(encoding, linesep, no_spaces, ini_file, ini_content):
     """
     Test empty lines preserved after edit
     """
@@ -289,42 +292,48 @@ def test_empty_lines(encoding, linesep, ini_file, ini_content):
             "# Comment on the first line",
             "",
             "# First main option",
-            "option1 = main1",
+            f"option1{'=' if no_spaces else ' = '}main1",
             "",
             "# Second main option",
-            "option2 = main2",
+            f"option2{'=' if no_spaces else ' = '}main2",
             "",
             "[main]",
             "# Another comment",
-            "test1 = value 1",
+            f"test1{'=' if no_spaces else ' = '}value 1",
             "",
-            "test2 = value 2",
+            f"test2{'=' if no_spaces else ' = '}value 2",
             "",
             "[SectionB]",
-            "test1 = value 1B",
+            f"test1{'=' if no_spaces else ' = '}value 1B",
             "",
             "# Blank line should be above",
-            "test3 = new value 3B",
+            f"test3{'=' if no_spaces else ' = '}new value 3B",
             "",
             "[SectionC]",
             "# The following option is empty",
-            "empty_option = ",
+            f"empty_option{'=' if no_spaces else ' = '}",
             "",
         ]
     )
     ini.set_option(
-        str(ini_file), {"SectionB": {"test3": "new value 3B"}}, encoding=encoding
+        str(ini_file),
+        {"SectionB": {"test3": "new value 3B"}},
+        encoding=encoding,
+        no_spaces=no_spaces,
     )
     with salt.utils.files.fopen(str(ini_file), "r") as fp_:
         file_content = fp_.read()
     assert expected == file_content
 
 
+@pytest.mark.parametrize("no_spaces", [True, False])
 @pytest.mark.parametrize("linesep", ["\r", "\n", "\r\n"])
 @pytest.mark.parametrize(
     "encoding", [None, "cp1252" if sys.platform == "win32" else "ISO-2022-JP"]
 )
-def test_empty_lines_multiple_edits(encoding, linesep, ini_file, ini_content):
+def test_empty_lines_multiple_edits(
+    encoding, linesep, no_spaces, ini_file, ini_content
+):
     """
     Test empty lines preserved after multiple edits
     """
@@ -337,6 +346,7 @@ def test_empty_lines_multiple_edits(encoding, linesep, ini_file, ini_content):
         str(ini_file),
         {"SectionB": {"test3": "this value will be edited two times"}},
         encoding=encoding,
+        no_spaces=no_spaces,
     )
 
     expected = os.linesep.join(
@@ -344,31 +354,34 @@ def test_empty_lines_multiple_edits(encoding, linesep, ini_file, ini_content):
             "# Comment on the first line",
             "",
             "# First main option",
-            "option1 = main1",
+            f"option1{'=' if no_spaces else ' = '}main1",
             "",
             "# Second main option",
-            "option2 = main2",
+            f"option2{'=' if no_spaces else ' = '}main2",
             "",
             "[main]",
             "# Another comment",
-            "test1 = value 1",
+            f"test1{'=' if no_spaces else ' = '}value 1",
             "",
-            "test2 = value 2",
+            f"test2{'=' if no_spaces else ' = '}value 2",
             "",
             "[SectionB]",
-            "test1 = value 1B",
+            f"test1{'=' if no_spaces else ' = '}value 1B",
             "",
             "# Blank line should be above",
-            "test3 = new value 3B",
+            f"test3{'=' if no_spaces else ' = '}new value 3B",
             "",
             "[SectionC]",
             "# The following option is empty",
-            "empty_option = ",
+            f"empty_option{'=' if no_spaces else ' = '}",
             "",
         ]
     )
     ini.set_option(
-        str(ini_file), {"SectionB": {"test3": "new value 3B"}}, encoding=encoding
+        str(ini_file),
+        {"SectionB": {"test3": "new value 3B"}},
+        encoding=encoding,
+        no_spaces=no_spaces,
     )
     with salt.utils.files.fopen(str(ini_file), "r") as fp_:
         file_content = fp_.read()
diff --git a/tests/pytests/unit/modules/test_pip.py b/tests/pytests/unit/modules/test_pip.py
index b2b5a8988d0..1622a848238 100644
--- a/tests/pytests/unit/modules/test_pip.py
+++ b/tests/pytests/unit/modules/test_pip.py
@@ -10,6 +10,10 @@ import salt.utils.platform
 from salt.exceptions import CommandExecutionError
 from tests.support.mock import MagicMock, patch
 
+TARGET = []
+if os.environ.get("VENV_PIP_TARGET"):
+    TARGET = ["--target", os.environ.get("VENV_PIP_TARGET")]
+
 
 class FakeFopen:
     def __init__(self, filename):
@@ -97,6 +101,7 @@ def test_install_frozen_app(python_binary):
                 expected = [
                     *python_binary,
                     "install",
+                    *TARGET,
                     pkg,
                 ]
                 mock.assert_called_with(
@@ -118,6 +123,7 @@ def test_install_source_app(python_binary):
                 expected = [
                     *python_binary,
                     "install",
+                    *TARGET,
                     pkg,
                 ]
                 mock.assert_called_with(
@@ -138,6 +144,7 @@ def test_fix4361(python_binary):
             "install",
             "--requirement",
             "requirements.txt",
+            *TARGET,
         ]
         mock.assert_called_with(
             expected_cmd,
@@ -164,7 +171,7 @@ def test_install_multiple_editable(python_binary):
         "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
     ]
 
-    expected = [*python_binary, "install"]
+    expected = [*python_binary, "install", *TARGET]
     for item in editables:
         expected.extend(["--editable", item])
 
@@ -200,7 +207,7 @@ def test_install_multiple_pkgs_and_editables(python_binary):
         "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
     ]
 
-    expected = [*python_binary, "install"]
+    expected = [*python_binary, "install", *TARGET]
     expected.extend(pkgs)
     for item in editables:
         expected.extend(["--editable", item])
@@ -236,6 +243,7 @@ def test_install_multiple_pkgs_and_editables(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             pkgs[0],
             "--editable",
             editables[0],
@@ -263,7 +271,7 @@ def test_issue5940_install_multiple_pip_mirrors(python_binary):
         expected = [*python_binary, "install", "--use-mirrors"]
         for item in mirrors:
             expected.extend(["--mirrors", item])
-        expected.append("pep8")
+        expected = [*expected, *TARGET, "pep8"]
 
         # Passing mirrors as a list
         mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
@@ -295,6 +303,7 @@ def test_issue5940_install_multiple_pip_mirrors(python_binary):
             "--use-mirrors",
             "--mirrors",
             mirrors[0],
+            *TARGET,
             "pep8",
         ]
 
@@ -322,7 +331,7 @@ def test_install_with_multiple_find_links(python_binary):
     expected = [*python_binary, "install"]
     for item in find_links:
         expected.extend(["--find-links", item])
-    expected.append(pkg)
+    expected = [*expected, *TARGET, pkg]
 
     # Passing mirrors as a list
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
@@ -365,6 +374,7 @@ def test_install_with_multiple_find_links(python_binary):
         "install",
         "--find-links",
         find_links[0],
+        *TARGET,
         pkg,
     ]
 
@@ -430,6 +440,7 @@ def test_install_cached_requirements_used(python_binary):
                 "install",
                 "--requirement",
                 "my_cached_reqs",
+                *TARGET,
             ]
             mock.assert_called_with(
                 expected,
@@ -486,6 +497,7 @@ def test_install_log_argument_in_resulting_command(python_binary, tmp_path):
                 "install",
                 "--log",
                 log_path,
+                *TARGET,
                 pkg,
             ]
             mock.assert_called_with(
@@ -516,7 +528,7 @@ def test_install_timeout_argument_in_resulting_command(python_binary):
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, timeout=10)
         mock.assert_called_with(
-            expected + [10, pkg],
+            expected + [10, *TARGET, pkg],
             saltenv="base",
             runas=None,
             use_vt=False,
@@ -528,7 +540,7 @@ def test_install_timeout_argument_in_resulting_command(python_binary):
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, timeout="10")
         mock.assert_called_with(
-            expected + ["10", pkg],
+            expected + ["10", *TARGET, pkg],
             saltenv="base",
             runas=None,
             use_vt=False,
@@ -552,6 +564,7 @@ def test_install_index_url_argument_in_resulting_command(python_binary):
             "install",
             "--index-url",
             index_url,
+            *TARGET,
             pkg,
         ]
         mock.assert_called_with(
@@ -574,6 +587,7 @@ def test_install_extra_index_url_argument_in_resulting_command(python_binary):
             "install",
             "--extra-index-url",
             extra_index_url,
+            *TARGET,
             pkg,
         ]
         mock.assert_called_with(
@@ -590,7 +604,7 @@ def test_install_no_index_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, no_index=True)
-        expected = [*python_binary, "install", "--no-index", pkg]
+        expected = [*python_binary, "install", "--no-index", *TARGET, pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -606,7 +620,7 @@ def test_install_build_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, build=build)
-        expected = [*python_binary, "install", "--build", build, pkg]
+        expected = [*python_binary, "install", "--build", build, *TARGET, pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -641,6 +655,7 @@ def test_install_download_argument_in_resulting_command(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             "--download",
             download,
             pkg,
@@ -659,7 +674,7 @@ def test_install_no_download_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, no_download=True)
-        expected = [*python_binary, "install", "--no-download", pkg]
+        expected = [*python_binary, "install", *TARGET, "--no-download", pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -686,6 +701,7 @@ def test_install_download_cache_dir_arguments_in_resulting_command(python_binary
                 expected = [
                     *python_binary,
                     "install",
+                    *TARGET,
                     cmd_arg,
                     download_cache,
                     pkg,
@@ -715,7 +731,7 @@ def test_install_source_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, source=source)
-        expected = [*python_binary, "install", "--source", source, pkg]
+        expected = [*python_binary, "install", *TARGET, "--source", source, pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -734,6 +750,7 @@ def test_install_exists_action_argument_in_resulting_command(python_binary):
             expected = [
                 *python_binary,
                 "install",
+                *TARGET,
                 "--exists-action",
                 action,
                 pkg,
@@ -756,7 +773,7 @@ def test_install_install_options_argument_in_resulting_command(python_binary):
     install_options = ["--exec-prefix=/foo/bar", "--install-scripts=/foo/bar/bin"]
     pkg = "pep8"
 
-    expected = [*python_binary, "install"]
+    expected = [*python_binary, "install", *TARGET]
     for item in install_options:
         expected.extend(["--install-option", item])
     expected.append(pkg)
@@ -792,6 +809,7 @@ def test_install_install_options_argument_in_resulting_command(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             "--install-option",
             install_options[0],
             pkg,
@@ -809,7 +827,7 @@ def test_install_global_options_argument_in_resulting_command(python_binary):
     global_options = ["--quiet", "--no-user-cfg"]
     pkg = "pep8"
 
-    expected = [*python_binary, "install"]
+    expected = [*python_binary, "install", *TARGET]
     for item in global_options:
         expected.extend(["--global-option", item])
     expected.append(pkg)
@@ -845,6 +863,7 @@ def test_install_global_options_argument_in_resulting_command(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             "--global-option",
             global_options[0],
             pkg,
@@ -863,7 +882,7 @@ def test_install_upgrade_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, upgrade=True)
-        expected = [*python_binary, "install", "--upgrade", pkg]
+        expected = [*python_binary, "install", *TARGET, "--upgrade", pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -881,6 +900,7 @@ def test_install_force_reinstall_argument_in_resulting_command(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             "--force-reinstall",
             pkg,
         ]
@@ -901,6 +921,7 @@ def test_install_ignore_installed_argument_in_resulting_command(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             "--ignore-installed",
             pkg,
         ]
@@ -918,7 +939,7 @@ def test_install_no_deps_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, no_deps=True)
-        expected = [*python_binary, "install", "--no-deps", pkg]
+        expected = [*python_binary, "install", *TARGET, "--no-deps", pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -933,7 +954,7 @@ def test_install_no_install_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, no_install=True)
-        expected = [*python_binary, "install", "--no-install", pkg]
+        expected = [*python_binary, "install", *TARGET, "--no-install", pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -949,7 +970,7 @@ def test_install_proxy_argument_in_resulting_command(python_binary):
     mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         pip.install(pkg, proxy=proxy)
-        expected = [*python_binary, "install", "--proxy", proxy, pkg]
+        expected = [*python_binary, "install", "--proxy", proxy, *TARGET, pkg]
         mock.assert_called_with(
             expected,
             saltenv="base",
@@ -976,7 +997,7 @@ def test_install_proxy_false_argument_in_resulting_command(python_binary):
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         with patch.dict(pip.__opts__, config_mock):
             pip.install(pkg, proxy=proxy)
-            expected = [*python_binary, "install", pkg]
+            expected = [*python_binary, "install", *TARGET, pkg]
             mock.assert_called_with(
                 expected,
                 saltenv="base",
@@ -1007,6 +1028,7 @@ def test_install_global_proxy_in_resulting_command(python_binary):
                 "install",
                 "--proxy",
                 proxy,
+                *TARGET,
                 pkg,
             ]
             mock.assert_called_with(
@@ -1027,6 +1049,7 @@ def test_install_multiple_requirements_arguments_in_resulting_command(python_bin
         expected = [*python_binary, "install"]
         for item in cached_reqs:
             expected.extend(["--requirement", item])
+        expected.extend(TARGET)
 
         # Passing option as a list
         mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
@@ -1063,6 +1086,7 @@ def test_install_multiple_requirements_arguments_in_resulting_command(python_bin
                 "install",
                 "--requirement",
                 cached_reqs[0],
+                *TARGET,
             ]
             mock.assert_called_with(
                 expected,
@@ -1083,6 +1107,7 @@ def test_install_extra_args_arguments_in_resulting_command(python_binary):
         expected = [
             *python_binary,
             "install",
+            *TARGET,
             pkg,
             "--latest-pip-kwarg",
             "param",
@@ -1598,7 +1623,7 @@ def test_install_pre_argument_in_resulting_command(python_binary):
     with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
         with patch("salt.modules.pip.version", MagicMock(return_value="1.3")):
             pip.install(pkg, pre_releases=True)
-            expected = [*python_binary, "install", pkg]
+            expected = [*python_binary, "install", *TARGET, pkg]
             mock.assert_called_with(
                 expected,
                 saltenv="base",
@@ -1614,7 +1639,7 @@ def test_install_pre_argument_in_resulting_command(python_binary):
     ):
         with patch("salt.modules.pip._get_pip_bin", MagicMock(return_value=["pip"])):
             pip.install(pkg, pre_releases=True)
-            expected = ["pip", "install", "--pre", pkg]
+            expected = ["pip", "install", *TARGET, "--pre", pkg]
             mock_run_all.assert_called_with(
                 expected,
                 saltenv="base",
diff --git a/tests/pytests/unit/modules/test_selinux.py b/tests/pytests/unit/modules/test_selinux.py
index b67a1b52577..0ceba06a134 100644
--- a/tests/pytests/unit/modules/test_selinux.py
+++ b/tests/pytests/unit/modules/test_selinux.py
@@ -6,6 +6,8 @@ import salt.modules.selinux as selinux
 from salt.exceptions import SaltInvocationError
 from tests.support.mock import MagicMock, mock_open, patch
 
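+# semanage/SELinux tooling only exists on Linux, so skip this module elsewhere.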
+pytestmark = [pytest.mark.skip_unless_on_linux]
+
 
 @pytest.fixture
 def configure_loader_modules():
@@ -399,7 +401,39 @@ def test_selinux_add_policy_regex(name, sel_type):
     ):
         selinux.fcontext_add_policy(name, sel_type=sel_type)
         filespec = re.escape(name)
-        expected_cmd_shell = f"semanage fcontext -l | egrep '{filespec}'"
+        expected_cmd_shell = f"semanage fcontext -l | grep -E '{filespec} '"
+        mock_cmd_shell.assert_called_once_with(
+            expected_cmd_shell,
+            ignore_retcode=True,
+        )
+        expected_cmd_run_all = (
+            f"semanage fcontext --modify --type {sel_type} {filespec}"
+        )
+        mock_cmd_run_all.assert_called_once_with(
+            expected_cmd_run_all,
+        )
+
+
+@pytest.mark.parametrize(
+    "name,sel_type",
+    (
+        ("/usr/share/munin/plugins/mysql_queries", "services_munin_plugin_exec_t"),
+        ("/usr/share/munin/plugins/mysql_", "unconfined_munin_plugin_exec_t"),
+    ),
+)
+def test_selinux_add_policy_shorter_path(name, sel_type):
+    """
+    Test adding policy with a shorter path than an existing entry
+    """
+    mock_cmd_shell = MagicMock(return_value={"retcode": 0})
+    mock_cmd_run_all = MagicMock(return_value={"retcode": 0})
+
+    with patch.dict(selinux.__salt__, {"cmd.shell": mock_cmd_shell}), patch.dict(
+        selinux.__salt__, {"cmd.run_all": mock_cmd_run_all}
+    ):
+        selinux.fcontext_add_policy(name, sel_type=sel_type)
+        filespec = re.escape(name)
+        expected_cmd_shell = f"semanage fcontext -l | grep -E '{filespec} '"
         mock_cmd_shell.assert_called_once_with(
             expected_cmd_shell,
             ignore_retcode=True,
diff --git a/tests/pytests/unit/modules/test_win_file.py b/tests/pytests/unit/modules/test_win_file.py
index 83667bb6377..d327defe56d 100644
--- a/tests/pytests/unit/modules/test_win_file.py
+++ b/tests/pytests/unit/modules/test_win_file.py
@@ -2,8 +2,10 @@ import os
 import re
 
 import pytest
+from saltfactories.utils import random_string
 
 import salt.modules.win_file as win_file
+import salt.modules.win_useradd
 import salt.utils.user
 import salt.utils.win_dacl
 from salt.exceptions import CommandExecutionError
@@ -20,6 +22,14 @@ def configure_loader_modules():
     }
 
 
+@pytest.fixture(scope="module")
+def test_user():
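+    # Create a throwaway local user for the SID lookups below, then remove it.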
+    user_name = random_string("test-")
+    salt.modules.win_useradd.add(name=user_name, password="P@ssw0rd")
+    yield user_name
+    salt.modules.win_useradd.delete(name=user_name)
+
+
 def test__virtual__not_windows():
     with patch("salt.utils.platform.is_windows", autospec=True, return_value=False):
         expected = (False, "Module win_file: Missing Win32 modules")
@@ -126,9 +136,9 @@ def test_uid_to_user_empty():
     assert result == expected
 
 
-def test_user_to_uid():
-    result = win_file.user_to_uid("Administrator")
-    expected = salt.utils.win_dacl.get_sid_string("Administrator")
+def test_user_to_uid(test_user):
+    result = win_file.user_to_uid(test_user)
+    expected = salt.utils.win_dacl.get_sid_string(test_user)
     assert result == expected
 
 
@@ -218,6 +228,7 @@ def test_check_perms(tmp_path):
             ret=ret,
             owner="Guests",
             grant_perms=grant_perms,
+            inheritance=False,
         )
 
     expected = {
@@ -234,7 +245,7 @@ def test_check_perms(tmp_path):
         "result": True,
     }
 
-    assert result == expected
+    assert result["changes"]["grant_perms"] == expected["changes"]["grant_perms"]
     owner = win_file.get_user(str(test_dir))
     assert owner == "Guests"
     perms = salt.utils.win_dacl.get_permissions(str(test_dir))
diff --git a/tests/pytests/unit/modules/test_win_pkg.py b/tests/pytests/unit/modules/test_win_pkg.py
index 3ae8f24f8dd..a976b6d6083 100644
--- a/tests/pytests/unit/modules/test_win_pkg.py
+++ b/tests/pytests/unit/modules/test_win_pkg.py
@@ -6,6 +6,7 @@ import logging
 
 import pytest
 
+import salt.loader.dunder
 import salt.modules.config as config
 import salt.modules.cp as cp
 import salt.modules.pkg_resource as pkg_resource
@@ -57,7 +58,7 @@ def configure_loader_modules(minion_opts):
     opts = minion_opts
     opts["master_uri"] = "localhost"
     return {
-        cp: {"__opts__": opts},
+        cp: {"__opts__": salt.loader.dunder.__opts__.with_default(opts)},
         win_pkg: {
             "_get_latest_package_version": MagicMock(return_value="3.03"),
             "_get_package_info": MagicMock(return_value=pkg_info),
diff --git a/tests/pytests/unit/modules/test_win_status.py b/tests/pytests/unit/modules/test_win_status.py
index c941b9ccfa1..236e164935b 100644
--- a/tests/pytests/unit/modules/test_win_status.py
+++ b/tests/pytests/unit/modules/test_win_status.py
@@ -16,9 +16,11 @@ def test__get_connected_ips():
     conns = psutil.net_connections()
     for conn in conns:
         if conn.status == psutil.CONN_ESTABLISHED:
-            ip = conn.laddr.ip
-            port = conn.laddr.port
+            ip = conn.raddr.ip
+            port = conn.raddr.port
             break
     assert port is not None
     assert ip is not None
-    assert win_status._get_connected_ips(port) == {ip}
+    # Since this may return more than one IP, let's make sure our test IP is in
+    # the list of IPs
+    assert ip in win_status._get_connected_ips(port)
diff --git a/tests/pytests/unit/modules/test_yaml.py b/tests/pytests/unit/modules/test_yaml.py
index 1f00af710c8..75bad8b5cf1 100644
--- a/tests/pytests/unit/modules/test_yaml.py
+++ b/tests/pytests/unit/modules/test_yaml.py
@@ -13,7 +13,7 @@ try:
     import salt.modules.yaml
     import salt.utils.yamllint
 
-    YAMLLINT_AVAILABLE = True
+    YAMLLINT_AVAILABLE = salt.utils.yamllint.has_yamllint()
 except ImportError:
     YAMLLINT_AVAILABLE = False
 
diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py
index ecdad27ac83..179f3113d55 100644
--- a/tests/pytests/unit/modules/test_yumpkg.py
+++ b/tests/pytests/unit/modules/test_yumpkg.py
@@ -1428,10 +1428,8 @@ def test_install_with_options():
                     "--disablerepo=*",
                     "--enablerepo=good",
                     "--branch=foo",
-                    "--setopt",
-                    "obsoletes=0",
-                    "--setopt",
-                    "plugins=0",
+                    "--setopt=obsoletes=0",
+                    "--setopt=plugins=0",
                     "install",
                     "foo",
                 ],
@@ -1459,10 +1457,8 @@ def test_install_with_options():
                     "--disablerepo=bad",
                     "--enablerepo=good",
                     "--branch=foo",
-                    "--setopt",
-                    "obsoletes=0",
-                    "--setopt",
-                    "plugins=0",
+                    "--setopt=obsoletes=0",
+                    "--setopt=plugins=0",
                     "install",
                     "foo",
                 ],
@@ -1857,10 +1853,8 @@ def test_upgrade_with_options():
                     "--disablerepo=*",
                     "--enablerepo=good",
                     "--branch=foo",
-                    "--setopt",
-                    "obsoletes=0",
-                    "--setopt",
-                    "plugins=0",
+                    "--setopt=obsoletes=0",
+                    "--setopt=plugins=0",
                     "--exclude=kernel*",
                     "--nogpgcheck",
                     "upgrade",
@@ -1902,10 +1896,8 @@ def test_upgrade_with_options():
                     "--disablerepo=bad",
                     "--enablerepo=good",
                     "--branch=foo",
-                    "--setopt",
-                    "obsoletes=0",
-                    "--setopt",
-                    "plugins=0",
+                    "--setopt=obsoletes=0",
+                    "--setopt=plugins=0",
                     "--exclude=kernel*",
                     "upgrade",
                 ],
@@ -3051,10 +3043,8 @@ def test_pkg_update_dnf():
                 "dnf",
                 "--quiet",
                 "-y",
-                "--setopt",
-                "plugins=0",
-                "--setopt",
-                "obsoletes=False",
+                "--setopt=plugins=0",
+                "--setopt=obsoletes=False",
                 "upgrade",
                 "foo",
             ],
diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py
index 5849e309398..a0c4213cfe4 100644
--- a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py
+++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py
@@ -226,15 +226,15 @@ def test_sidConversion_no_conversion(pol_info):
     assert pol_info._sidConversion([val]) == expected
 
 
-def test_sidConversion_everyone(pol_info):
+def test_sidConversion_sid(pol_info):
     val = ws.ConvertStringSidToSid("S-1-1-0")
     expected = ["Everyone"]
     assert pol_info._sidConversion([val]) == expected
 
 
-def test_sidConversion_administrator(pol_info):
-    val = ws.LookupAccountName("", "Administrator")[0]
-    expected = [f"{socket.gethostname()}\\Administrator"]
+def test_sidConversion_name(pol_info):
+    val = ws.LookupAccountName("", "DefaultAccount")[0]
+    expected = [f"{socket.gethostname()}\\DefaultAccount"]
     assert pol_info._sidConversion([val]) == expected
 
 
@@ -250,8 +250,8 @@ def test_usernamesToSidObjects_empty_value(pol_info, val, expected):
 
 
 def test_usernamesToSidObjects_string_list(pol_info):
-    val = "Administrator,Guest"
-    admin_sid = ws.LookupAccountName("", "Administrator")[0]
+    val = "DefaultAccount,Guest"
+    admin_sid = ws.LookupAccountName("", "DefaultAccount")[0]
     guest_sid = ws.LookupAccountName("", "Guest")[0]
     expected = [admin_sid, guest_sid]
     assert pol_info._usernamesToSidObjects(val) == expected
diff --git a/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py b/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py
index a220facf250..f20e9149340 100644
--- a/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py
+++ b/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py
@@ -349,6 +349,9 @@ def _test_set_user_policy(lgpo_bin, shell, name, setting, exp_regexes):
             ],
         ),
         (
+            # This will need to be fixed for Windows Server 2025
+            # The bottom two options have been removed in 2025
+            # Though not set here, we're verifying they were set
             "Specify settings for optional component installation and component repair",
             "Disabled",
             [
@@ -358,6 +361,8 @@ def _test_set_user_policy(lgpo_bin, shell, name, setting, exp_regexes):
             ],
         ),
         (
+            # This will need to be fixed for Windows Server 2025
+            # The bottom two options have been removed in 2025
             "Specify settings for optional component installation and component repair",
             {
                 "Alternate source file path": "",
@@ -371,6 +376,8 @@ def _test_set_user_policy(lgpo_bin, shell, name, setting, exp_regexes):
             ],
         ),
         (
+            # This will need to be fixed for Windows Server 2025
+            # The bottom two options have been removed in 2025
             "Specify settings for optional component installation and component repair",
             {
                 "Alternate source file path": r"\\some\fake\server",
@@ -757,3 +764,16 @@ def test_set_computer_policy_multiple_policies(clean_comp, lgpo_bin, shell):
             r"\\AU[\s]*AllowMUUpdateService[\s]*DELETE",
         ],
     )
+
+
+def test__encode_xmlns_url():
+    """
+    Tests the _encode_xmlns_url function.
+    Spaces in the xmlns url should be converted to %20
+    """
+    line = '<policyDefinitionResources xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" revision="1.0" schemaVersion="1.0" xmlns="http://schemas.microsoft.com/GroupPolicy/2006/07/Policysecurity intelligence">'
+    result = re.sub(
+        r'(.*)(\bxmlns(?::\w+)?)\s*=\s*"([^"]+)"(.*)', win_lgpo._encode_xmlns_url, line
+    )
+    expected = '<policyDefinitionResources xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" revision="1.0" schemaVersion="1.0" xmlns="http://schemas.microsoft.com/GroupPolicy/2006/07/Policysecurity%20intelligence">'
+    assert result == expected
diff --git a/tests/pytests/unit/output/test_profile.py b/tests/pytests/unit/output/test_profile.py
new file mode 100644
index 00000000000..8a6234c1821
--- /dev/null
+++ b/tests/pytests/unit/output/test_profile.py
@@ -0,0 +1,94 @@
+import pytest
+
+import salt.output.profile as profile
+
+
+@pytest.fixture
+def configure_loader_modules():
+    return {profile: {"__opts__": {"extension_modules": "", "color": False}}}
+
+
+def test_no_states_found():
+    """
+    Simulate the result of the "profile" outputter with state.apply.
+    i.e. salt-call --local state.apply --output=profile
+    """
+    data = {
+        "local": {
+            "no_|-states_|-states_|-None": {
+                "result": False,
+                "comment": "No Top file or master_tops data matches found. Please see master log for details.",
+                "name": "No States",
+                "changes": {},
+                "__run_num__": 0,
+            }
+        }
+    }
+
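+    # A duration of -1.0000 is what the outputter emits for entries with no
+    # recorded timing data.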
+    expected_output = (
+        "    ---------------------------------------\n"
+        "    |    name   | mod.fun | duration (ms) |\n"
+        "    ---------------------------------------\n"
+        "    | No States | no.None |       -1.0000 |\n"
+        "    ---------------------------------------"
+    )
+
+    ret = profile.output(data)
+    assert expected_output in ret
+
+
+def test_no_matching_sls():
+    """
+    Simulate the result of the "profile" outputter with state.sls.
+    i.e. salt-call --local state.sls foo --output=profile
+    """
+    data = {"local": ["No matching sls found for 'foo' in env 'base'"]}
+
+    expected_output = (
+        "    ---------------------------------------------------------------------------\n"
+        "    | name |                     mod.fun                      | duration (ms) |\n"
+        "    ---------------------------------------------------------------------------\n"
+        "    |  <>  | No matching sls found for 'foo' in env 'base'.No |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------------"
+    )
+
+    ret = profile.output(data)
+    assert expected_output in ret
+
+
+def test_output_with_grains_data():
+    """
+    Simulate the result of the "profile" outputter with grains data.
+    i.e. salt-call --local grains.items --output=profile
+    """
+    grains_data = {
+        "local": {
+            "dns": {"nameservers": ["0.0.0.0", "1.1.1.1"], "search": ["dns.com"]},
+            "fqdns": [],
+            "disks": ["sda"],
+            "ssds": ["nvme0n1"],
+            "shell": "/bin/bash",
+            "efi-secure-boot": False,
+        }
+    }
+
+    ret = profile.output(grains_data)
+    expected_ret = (
+        "    ---------------------------------------------------------------------\n"
+        "    |       name      |             mod.fun             | duration (ms) |\n"
+        "    ---------------------------------------------------------------------\n"
+        "    |        <>       |             dns.dns             |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------\n"
+        "    |      disks      |           disks.disks           |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------\n"
+        "    | efi-secure-boot | efi-secure-boot.efi-secure-boot |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------\n"
+        "    |      fqdns      |           fqdns.fqdns           |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------\n"
+        "    |      shell      |           shell.shell           |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------\n"
+        "    |       ssds      |            ssds.ssds            |       -1.0000 |\n"
+        "    ---------------------------------------------------------------------"
+    )
+
+    assert ret == expected_ret
diff --git a/tests/pytests/unit/states/file/test_recurse.py b/tests/pytests/unit/states/file/test_recurse.py
new file mode 100644
index 00000000000..53e6e0fd22f
--- /dev/null
+++ b/tests/pytests/unit/states/file/test_recurse.py
@@ -0,0 +1,48 @@
+import logging
+import os
+import pathlib
+
+import pytest
+
+import salt.states.file as filestate
+from tests.support.mock import MagicMock, patch
+
+log = logging.getLogger(__name__)
+
+
+@pytest.fixture
+def configure_loader_modules():
+    return {filestate: {"__salt__": {}, "__opts__": {}, "__env__": "base"}}
+
+
+def test__gen_recurse_managed_files():
+    """
+    Test _gen_recurse_managed_files to make sure it puts symlinks at the end of the list of files.
+    """
+    target_dir = pathlib.Path(f"{os.sep}some{os.sep}path{os.sep}target")
+    cp_list_master = MagicMock(
+        return_value=[
+            "target/symlink",
+            "target/just_a_file.txt",
+            "target/not_a_symlink/symlink",
+            "target/notasymlink",
+        ],
+    )
+    cp_list_master_symlinks = MagicMock(
+        return_value={
+            "target/symlink": f"{target_dir}{os.sep}not_a_symlink{os.sep}symlink"
+        }
+    )
+    patch_salt = {
+        "cp.list_master": cp_list_master,
+        "cp.list_master_symlinks": cp_list_master_symlinks,
+    }
+    with patch.dict(filestate.__salt__, patch_salt):
+        files, dirs, links, keep = filestate._gen_recurse_managed_files(
+            name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=True
+        )
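+    # Symlink entries should sort to the end, presumably so their targets exist first.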
+    expected = (
+        f"{os.sep}some{os.sep}path{os.sep}target{os.sep}symlink",
+        "salt://target/symlink?saltenv=base",
+    )
+    assert files[-1] == expected
diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py
index e494047f535..903f9cd069d 100644
--- a/tests/pytests/unit/states/test_pkg.py
+++ b/tests/pytests/unit/states/test_pkg.py
@@ -11,6 +11,7 @@ import salt.modules.yumpkg as yumpkg
 import salt.states.beacon as beaconstate
 import salt.states.pkg as pkg
 import salt.utils.state as state_utils
+from salt.loader.dunder import __opts__
 from salt.utils.event import SaltEvent
 from tests.support.mock import MagicMock, patch
 
@@ -21,7 +22,7 @@ log = logging.getLogger(__name__)
 def configure_loader_modules(minion_opts):
     return {
         cp: {
-            "__opts__": minion_opts,
+            "__opts__": __opts__.with_default(minion_opts),
         },
         pkg: {
             "__env__": "base",
diff --git a/tests/pytests/unit/states/test_service.py b/tests/pytests/unit/states/test_service.py
index 6be5afdc8ae..04cdd9b059b 100644
--- a/tests/pytests/unit/states/test_service.py
+++ b/tests/pytests/unit/states/test_service.py
@@ -3,6 +3,7 @@
 """
 
 import logging
+import subprocess
 
 import pytest
 
@@ -16,6 +17,20 @@ from tests.support.mock import MagicMock, patch
 log = logging.getLogger(__name__)
 
 
+def _check_systemctl():
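+    # Probe systemctl once and cache the result on the function object.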
+    if not hasattr(_check_systemctl, "memo"):
+        if not salt.utils.platform.is_linux():
+            _check_systemctl.memo = False
+        else:
+            proc = subprocess.run(["systemctl"], capture_output=True, check=False)
+            _check_systemctl.memo = (
+                b"Failed to get D-Bus connection: No such file or directory"
+                in proc.stderr
+                or b"Failed to connect to bus: No such file or directory" in proc.stderr
+            )
+    return _check_systemctl.memo
+
+
 def func(name):
     """
     Mock func method
@@ -679,6 +694,7 @@ def test_mod_beacon(tmp_path):
                         assert ret == expected
 
 
+@pytest.mark.skipif(_check_systemctl(), reason="systemctl is in a degraded state")
 @pytest.mark.skip_on_darwin(reason="service.running is currently failing on OSX")
 @pytest.mark.skip_if_not_root
 @pytest.mark.destructive_test
diff --git a/tests/pytests/unit/test_fileserver.py b/tests/pytests/unit/test_fileserver.py
index 8dd3ea0a27d..49be3967dc4 100644
--- a/tests/pytests/unit/test_fileserver.py
+++ b/tests/pytests/unit/test_fileserver.py
@@ -75,9 +75,7 @@ def test_file_server_url_escape(tmp_path):
     opts = {
         "fileserver_backend": ["roots"],
         "extension_modules": "",
-        "optimization_order": [
-            0,
-        ],
+        "optimization_order": [0, 1],
         "file_roots": {
             "base": [fileroot],
         },
@@ -102,9 +100,7 @@ def test_file_server_serve_url_escape(tmp_path):
     opts = {
         "fileserver_backend": ["roots"],
         "extension_modules": "",
-        "optimization_order": [
-            0,
-        ],
+        "optimization_order": [0, 1],
         "file_roots": {
             "base": [fileroot],
         },
diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py
index dfbf0b1d8bd..85dac2a0ae7 100644
--- a/tests/pytests/unit/test_minion.py
+++ b/tests/pytests/unit/test_minion.py
@@ -2,6 +2,7 @@ import asyncio
 import copy
 import logging
 import os
+import uuid
 
 import pytest
 import tornado
@@ -102,12 +103,15 @@ def test_minion_load_grains_default(minion_opts):
     ],
 )
 def test_send_req_fires_completion_event(event, minion_opts):
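+    # Pin uuid4 so the request-channel event tag asserted below is deterministic.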
+    req_id = uuid.uuid4()
     event_enter = MagicMock()
     event_enter.send.side_effect = event[1]
     event = MagicMock()
     event.__enter__.return_value = event_enter
 
-    with patch("salt.utils.event.get_event", return_value=event):
+    with patch("salt.utils.event.get_event", return_value=event), patch(
+        "uuid.uuid4", return_value=req_id
+    ):
         minion_opts["random_startup_delay"] = 0
         minion_opts["return_retry_tries"] = 30
         minion_opts["grains"] = {}
@@ -132,7 +136,7 @@ def test_send_req_fires_completion_event(event, minion_opts):
                         condition_event_tag = (
                             len(call.args) > 1
                             and call.args[1]
-                            == f"__master_req_channel_payload/{minion_opts['master']}"
+                            == f"__master_req_channel_payload/{req_id}/{minion_opts['master']}"
                         )
                         condition_event_tag_error = (
                             "{} != {}; Call(number={}): {}".format(
@@ -159,26 +163,42 @@ def test_send_req_fires_completion_event(event, minion_opts):
 
 
 async def test_send_req_async_regression_62453(minion_opts):
-    event_enter = MagicMock()
-    event_enter.send.side_effect = (
-        lambda data, tag, cb=None, timeout=60: tornado.gen.maybe_future(True)
-    )
-    event = MagicMock()
-    event.__enter__.return_value = event_enter
+
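+    # Minimal stand-in for salt.utils.event: fire_event_async succeeds, but no
+    # reply event ever arrives, so the request below is expected to time out.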
+    class MockEvent:
+
+        def __init__(self, *args, **kwargs):
+            pass
+
+        @tornado.gen.coroutine
+        def fire_event_async(self, *args, **kwargs):
+            return
+
+        def get_event(self, *args, **kwargs):
+            return
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, *args):
+            return
+
+    def get_event(*args, **kwargs):
+        return MockEvent()
 
     minion_opts["random_startup_delay"] = 0
-    minion_opts["return_retry_tries"] = 30
+    minion_opts["return_retry_tries"] = 5
     minion_opts["grains"] = {}
     minion_opts["ipc_mode"] = "tcp"
     with patch("salt.loader.grains"):
         minion = salt.minion.Minion(minion_opts)
 
         load = {"load": "value"}
-        timeout = 60
+        timeout = 1
 
-        # We are just validating no exception is raised
-        rtn = await minion._send_req_async(load, timeout)
-        assert rtn is False
+        with patch("salt.utils.event.get_event", get_event):
+            # The mocked event never delivers a reply, so expect a timeout.
+            with pytest.raises(TimeoutError):
+                await minion._send_req_async(load, timeout)
 
 
 def test_mine_send_tries(minion_opts):
diff --git a/tests/pytests/unit/test_pillar.py b/tests/pytests/unit/test_pillar.py
index d44a337981f..1b29c26248d 100644
--- a/tests/pytests/unit/test_pillar.py
+++ b/tests/pytests/unit/test_pillar.py
@@ -1259,3 +1259,43 @@ def test_compile_pillar_disk_cache(master_opts, grains):
                 "mocked_minion": {"base": {"foo": "bar"}, "dev": {"foo": "baz"}}
             }
             assert pillar.cache._dict == expected_cache
+
+
+def test_remote_pillar_bad_return(grains, tmp_pki):
+    opts = {
+        "pki_dir": tmp_pki,
+        "id": "minion",
+        "master_uri": "tcp://127.0.0.1:4505",
+        "__role": "minion",
+        "keysize": 2048,
+        "saltenv": "base",
+        "pillarenv": "base",
+    }
+    pillar = salt.pillar.RemotePillar(opts, grains, "mocked-minion", "dev")
+
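+    # A non-dict reply from the req channel simulates a bad master response.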
+    async def crypted_transfer_mock(*args, **kwargs):
+        return ""
+
+    pillar.channel.crypted_transfer_decode_dictentry = crypted_transfer_mock
+    with pytest.raises(salt.exceptions.SaltClientError):
+        pillar.compile_pillar()
+
+
+async def test_async_remote_pillar_bad_return(grains, tmp_pki):
+    opts = {
+        "pki_dir": tmp_pki,
+        "id": "minion",
+        "master_uri": "tcp://127.0.0.1:4505",
+        "__role": "minion",
+        "keysize": 2048,
+        "saltenv": "base",
+        "pillarenv": "base",
+    }
+    pillar = salt.pillar.AsyncRemotePillar(opts, grains, "mocked-minion", "dev")
+
+    async def crypted_transfer_mock(*args, **kwargs):
+        return ""
+
+    pillar.channel.crypted_transfer_decode_dictentry = crypted_transfer_mock
+    with pytest.raises(salt.exceptions.SaltClientError):
+        await pillar.compile_pillar()
diff --git a/tests/pytests/unit/test_request_channel.py b/tests/pytests/unit/test_request_channel.py
index 300ee9c2332..746d1bc7398 100644
--- a/tests/pytests/unit/test_request_channel.py
+++ b/tests/pytests/unit/test_request_channel.py
@@ -690,7 +690,7 @@ async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_
         "cmd": "_pillar",
     }
     try:
-        ret = await client.crypted_transfer_decode_dictentry(
+        ret = await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
             load,
             dictkey="pillar",
         )
@@ -774,7 +774,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(
 
     try:
         with pytest.raises(salt.crypt.AuthenticationError) as excinfo:
-            ret = await client.crypted_transfer_decode_dictentry(
+            ret = await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
                 load,
                 dictkey="pillar",
             )
@@ -866,7 +866,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature(
 
     try:
         with pytest.raises(salt.crypt.AuthenticationError) as excinfo:
-            ret = await client.crypted_transfer_decode_dictentry(
+            ret = await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
                 load,
                 dictkey="pillar",
             )
@@ -960,7 +960,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key(
     }
     try:
         with pytest.raises(salt.crypt.AuthenticationError) as excinfo:
-            await client.crypted_transfer_decode_dictentry(
+            await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
                 load,
                 dictkey="pillar",
             )
diff --git a/tests/pytests/unit/transport/test_publish_client.py b/tests/pytests/unit/transport/test_publish_client.py
index 2372cfa2791..c212d7ef4e1 100644
--- a/tests/pytests/unit/transport/test_publish_client.py
+++ b/tests/pytests/unit/transport/test_publish_client.py
@@ -231,11 +231,10 @@ async def test_publish_client_connect_server_comes_up(transport, io_loop):
         await asyncio.sleep(0.03)
         ctx.term()
     elif transport == "tcp":
-
         client = salt.transport.tcp.PublishClient(opts, io_loop, host=host, port=port)
         # XXX: This is an implementation detail of the tcp transport.
         # await client.connect(port)
-        io_loop.spawn_callback(client.connect)
+        io_loop.spawn_callback(client.connect, timeout=120)
         assert client._stream is None
         await asyncio.sleep(2)
 
diff --git a/tests/pytests/unit/transport/test_tcp.py b/tests/pytests/unit/transport/test_tcp.py
index d064b561c11..6d5c24b4730 100644
--- a/tests/pytests/unit/transport/test_tcp.py
+++ b/tests/pytests/unit/transport/test_tcp.py
@@ -685,3 +685,74 @@ async def test_pub_server_publish_payload_closed_stream(master_opts, io_loop):
     server.clients = {client}
     await server.publish_payload(package, topic_list)
     assert server.clients == set()
+
+
+async def test_pub_server_paths_no_perms(master_opts, io_loop):
+    def publish_payload(payload):
+        return payload
+
+    pubserv = salt.transport.tcp.PublishServer(
+        master_opts,
+        pub_host="127.0.0.1",
+        pub_port=5151,
+        pull_host="127.0.0.1",
+        pull_port=5152,
+    )
+    assert pubserv.pull_path is None
+    assert pubserv.pub_path is None
+    with patch("os.chmod") as p:
+        await pubserv.publisher(publish_payload)
+        assert p.call_count == 0
+
+
+@pytest.mark.skip_on_windows()
+async def test_pub_server_publisher_pull_path_perms(master_opts, io_loop, tmp_path):
+    def publish_payload(payload):
+        return payload
+
+    pull_path = str(tmp_path / "pull.ipc")
+    pull_path_perms = 0o664
+    pubserv = salt.transport.tcp.PublishServer(
+        master_opts,
+        pub_host="127.0.0.1",
+        pub_port=5151,
+        pull_host=None,
+        pull_port=None,
+        pull_path=pull_path,
+        pull_path_perms=pull_path_perms,
+    )
+    assert pubserv.pull_path == pull_path
+    assert pubserv.pull_path_perms == pull_path_perms
+    assert pubserv.pull_host is None
+    assert pubserv.pull_port is None
+    with patch("os.chmod") as p:
+        await pubserv.publisher(publish_payload)
+        assert p.call_count == 1
+        assert p.call_args.args == (pubserv.pull_path, pubserv.pull_path_perms)
+
+
+@pytest.mark.skip_on_windows()
+async def test_pub_server_publisher_pub_path_perms(master_opts, io_loop, tmp_path):
+    def publish_payload(payload):
+        return payload
+
+    pub_path = str(tmp_path / "pub.ipc")
+    pub_path_perms = 0o664
+    pubserv = salt.transport.tcp.PublishServer(
+        master_opts,
+        pub_host=None,
+        pub_port=None,
+        pub_path=pub_path,
+        pub_path_perms=pub_path_perms,
+        pull_host="127.0.0.1",
+        pull_port=5151,
+        pull_path=None,
+    )
+    assert pubserv.pub_path == pub_path
+    assert pubserv.pub_path_perms == pub_path_perms
+    assert pubserv.pub_host is None
+    assert pubserv.pub_port is None
+    with patch("os.chmod") as p:
+        await pubserv.publisher(publish_payload)
+        assert p.call_count == 1
+        assert p.call_args.args == (pubserv.pub_path, pubserv.pub_path_perms)
diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py
index 609b70ab48a..fbf124fd765 100644
--- a/tests/pytests/unit/transport/test_zeromq.py
+++ b/tests/pytests/unit/transport/test_zeromq.py
@@ -1,8 +1,11 @@
 import ctypes
+import hashlib
 import logging
 import multiprocessing
+import os
 import threading
 import time
+import uuid
 
 import msgpack
 import pytest
@@ -17,7 +20,7 @@ import salt.utils.process
 import salt.utils.stringutils
 from salt.master import SMaster
 from tests.conftest import FIPS_TESTRUN
-from tests.support.mock import AsyncMock, MagicMock
+from tests.support.mock import AsyncMock, MagicMock, patch
 
 log = logging.getLogger(__name__)
 
@@ -354,6 +357,1091 @@ class MockSaltMinionMaster:
         raise tornado.gen.Return((payload, {"fun": "send_clear"}))
 
 
+@pytest.mark.parametrize("message", ["", [], ()])
+def test_badload(temp_salt_minion, temp_salt_master, message):
+    """
+    Test a variety of bad requests; make sure that we get some sort of error.
+    """
+    with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master:
+        ret = minion_master.channel.send(message, timeout=5, tries=1)
+        assert ret == "payload and load must be a dict"
+
+
+def test_payload_handling_exception(temp_salt_minion, temp_salt_master):
+    """
+    test of getting exception on payload handling
+    """
+    with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master:
+        with patch.object(minion_master.mock, "_handle_payload_hook") as _mock:
+            _mock.side_effect = Exception()
+            ret = minion_master.channel.send({}, timeout=5, tries=1)
+            assert ret == "Some exception handling minion payload"
+
+
+def test_serverside_exception(temp_salt_minion, temp_salt_master):
+    """
+    test of getting server side exception on payload handling
+    """
+    with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master:
+        with patch.object(minion_master.mock, "_handle_payload_hook") as _mock:
+            _mock.side_effect = tornado.gen.Return(({}, {"fun": "madeup-fun"}))
+            ret = minion_master.channel.send({}, timeout=5, tries=1)
+            assert ret == "Server-side exception handling payload"
+
+
+async def test_zeromq_async_pub_channel_publish_port(temp_salt_master):
+    """
+    Test that, when connecting, we use the publish_port set in opts when it is not 4506.
+    """
+    opts = dict(
+        temp_salt_master.config.copy(),
+        ipc_mode="ipc",
+        pub_hwm=0,
+        recon_randomize=False,
+        publish_port=455505,
+        recon_default=1,
+        recon_max=2,
+        master_ip="127.0.0.1",
+        acceptance_wait_time=5,
+        acceptance_wait_time_max=5,
+        sign_pub_messages=False,
+    )
+    opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts)
+    ioloop = tornado.ioloop.IOLoop()
+    transport = salt.transport.zeromq.PublishClient(
+        opts, ioloop, host=opts["interface"], port=opts["publish_port"]
+    )
+    with transport:
+        patch_socket = MagicMock(return_value=True)
+        with patch.object(transport, "_socket", patch_socket):
+            await transport.connect(455505)
+    assert str(opts["publish_port"]) in patch_socket.mock_calls[0][1][0]
+
+
+def test_zeromq_async_pub_channel_filtering_decode_message_no_match(
+    temp_salt_master,
+):
+    """
+    Test zeromq PublishClient _decode_messages when zmq_filtering is
+    enabled and the minion does not match.
+    """
+    message = [
+        b"4f26aeafdb2367620a393c973eddbe8f8b846eb",
+        b"\x82\xa3enc\xa3aes\xa4load\xda\x00`\xeeR\xcf"
+        b"\x0eaI#V\x17if\xcf\xae\x05\xa7\xb3bN\xf7\xb2\xe2"
+        b'\xd0sF\xd1\xd4\xecB\xe8\xaf"/*ml\x80Q3\xdb\xaexg'
+        b"\x8e\x8a\x8c\xd3l\x03\\,J\xa7\x01i\xd1:]\xe3\x8d"
+        b"\xf4\x03\x88K\x84\n`\xe8\x9a\xad\xad\xc6\x8ea\x15>"
+        b"\x92m\x9e\xc7aM\x11?\x18;\xbd\x04c\x07\x85\x99\xa3\xea[\x00D",
+    ]
+
+    opts = dict(
+        temp_salt_master.config.copy(),
+        ipc_mode="ipc",
+        pub_hwm=0,
+        zmq_filtering=True,
+        recon_randomize=False,
+        recon_default=1,
+        recon_max=2,
+        master_ip="127.0.0.1",
+        acceptance_wait_time=5,
+        acceptance_wait_time_max=5,
+        sign_pub_messages=False,
+    )
+    opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts)
+
+    ioloop = tornado.ioloop.IOLoop()
+    channel = salt.transport.zeromq.PublishClient(
+        opts, ioloop, host=opts["interface"], port=opts["publish_port"]
+    )
+    with channel:
+        with patch(
+            "salt.crypt.AsyncAuth.crypticle",
+            MagicMock(return_value={"tgt_type": "glob", "tgt": "*", "jid": 1}),
+        ):
+            res = channel._decode_messages(message)
+    assert res is None
+
+
+def test_zeromq_async_pub_channel_filtering_decode_message(
+    temp_salt_master, temp_salt_minion
+):
+    """
+    Test zeromq PublishClient _decode_messages when zmq_filtering is enabled
+    and the minion matches.
+    """
+    minion_hexid = salt.utils.stringutils.to_bytes(
+        hashlib.sha1(salt.utils.stringutils.to_bytes(temp_salt_minion.id)).hexdigest()
+    )
+
+    message = [
+        minion_hexid,
+        b"\x82\xa3enc\xa3aes\xa4load\xda\x00`\xeeR\xcf"
+        b"\x0eaI#V\x17if\xcf\xae\x05\xa7\xb3bN\xf7\xb2\xe2"
+        b'\xd0sF\xd1\xd4\xecB\xe8\xaf"/*ml\x80Q3\xdb\xaexg'
+        b"\x8e\x8a\x8c\xd3l\x03\\,J\xa7\x01i\xd1:]\xe3\x8d"
+        b"\xf4\x03\x88K\x84\n`\xe8\x9a\xad\xad\xc6\x8ea\x15>"
+        b"\x92m\x9e\xc7aM\x11?\x18;\xbd\x04c\x07\x85\x99\xa3\xea[\x00D",
+    ]
+
+    opts = dict(
+        temp_salt_master.config.copy(),
+        id=temp_salt_minion.id,
+        ipc_mode="ipc",
+        pub_hwm=0,
+        zmq_filtering=True,
+        recon_randomize=False,
+        recon_default=1,
+        recon_max=2,
+        master_ip="127.0.0.1",
+        acceptance_wait_time=5,
+        acceptance_wait_time_max=5,
+        sign_pub_messages=False,
+    )
+    opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts)
+
+    ioloop = tornado.ioloop.IOLoop()
+    channel = salt.transport.zeromq.PublishClient(
+        opts, ioloop, host=opts["interface"], port=opts["publish_port"]
+    )
+    with channel:
+        with patch(
+            "salt.crypt.AsyncAuth.crypticle",
+            MagicMock(return_value={"tgt_type": "glob", "tgt": "*", "jid": 1}),
+        ):
+            res = channel._decode_messages(message)
+
+    assert res["enc"] == "aes"
+
+
+def test_req_server_chan_encrypt_v2(
+    pki_dir, encryption_algorithm, signing_algorithm, master_opts
+):
+    loop = tornado.ioloop.IOLoop.current()
+    master_opts.update(
+        {
+            "worker_threads": 1,
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "zmq_monitor": False,
+            "mworker_queue_niceness": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("master")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+        }
+    )
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    dictkey = "pillar"
+    nonce = "abcdefg"
+    pillar_data = {"pillar1": "meh"}
+    ret = server._encrypt_private(
+        pillar_data,
+        dictkey,
+        "minion",
+        nonce,
+        encryption_algorithm=encryption_algorithm,
+        signing_algorithm=signing_algorithm,
+    )
+    assert "key" in ret
+    assert dictkey in ret
+
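+    # Decrypt the AES session key with the minion's private key, then unpack
+    # the signed payload and verify the returned fields.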
+    key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem")))
+    aes = key.decrypt(ret["key"], encryption_algorithm)
+    pcrypt = salt.crypt.Crypticle(master_opts, aes)
+    signed_msg = pcrypt.loads(ret[dictkey])
+
+    assert "sig" in signed_msg
+    assert "data" in signed_msg
+    data = salt.payload.loads(signed_msg["data"])
+    assert "key" in data
+    assert data["key"] == ret["key"]
+    assert "key" in data
+    assert data["nonce"] == nonce
+    assert "pillar" in data
+    assert data["pillar"] == pillar_data
+
+
+def test_req_server_chan_encrypt_v1(pki_dir, encryption_algorithm, master_opts):
+    loop = tornado.ioloop.IOLoop.current()
+    master_opts.update(
+        {
+            "worker_threads": 1,
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "zmq_monitor": False,
+            "mworker_queue_niceness": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("master")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+        }
+    )
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    dictkey = "pillar"
+    nonce = "abcdefg"
+    pillar_data = {"pillar1": "meh"}
+    ret = server._encrypt_private(
+        pillar_data,
+        dictkey,
+        "minion",
+        sign_messages=False,
+        encryption_algorithm=encryption_algorithm,
+    )
+
+    assert "key" in ret
+    assert dictkey in ret
+
+    key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem")))
+    aes = key.decrypt(ret["key"], encryption_algorithm)
+    pcrypt = salt.crypt.Crypticle(master_opts, aes)
+    data = pcrypt.loads(ret[dictkey])
+    assert data == pillar_data
+
+
+def test_req_chan_decode_data_dict_entry_v1(
+    pki_dir, encryption_algorithm, minion_opts, master_opts
+):
+    mockloop = MagicMock()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    client = salt.channel.client.ReqChannel.factory(minion_opts, io_loop=mockloop)
+    dictkey = "pillar"
+    target = "minion"
+    pillar_data = {"pillar1": "meh"}
+    ret = server._encrypt_private(
+        pillar_data,
+        dictkey,
+        target,
+        sign_messages=False,
+        encryption_algorithm=encryption_algorithm,
+    )
+    key = client.auth.get_keys()
+    aes = key.decrypt(ret["key"], encryption_algorithm)
+    pcrypt = salt.crypt.Crypticle(client.opts, aes)
+    ret_pillar_data = pcrypt.loads(ret[dictkey])
+    assert ret_pillar_data == pillar_data
+
+
+async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_dir):
+    mockloop = MagicMock()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop)
+
+    dictkey = "pillar"
+    target = "minion"
+    pillar_data = {"pillar1": "meh"}
+
+    # Mock auth and message client.
+    auth = client.auth
+    auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY)
+    client.auth = MagicMock()
+    client.auth.mpub = auth.mpub
+    client.auth.authenticated = True
+    client.auth.get_keys = auth.get_keys
+    client.auth.crypticle.dumps = auth.crypticle.dumps
+    client.auth.crypticle.loads = auth.crypticle.loads
+    client.transport = MagicMock()
+
+    @tornado.gen.coroutine
+    def mocksend(msg, timeout=60, tries=3):
+        client.transport.msg = msg
+        load = client.auth.crypticle.loads(msg["load"])
+        ret = server._encrypt_private(
+            pillar_data,
+            dictkey,
+            target,
+            nonce=load["nonce"],
+            sign_messages=True,
+            encryption_algorithm=minion_opts["encryption_algorithm"],
+            signing_algorithm=minion_opts["signing_algorithm"],
+        )
+        raise tornado.gen.Return(ret)
+
+    client.transport.send = mocksend
+
+    # Note the 'ver' value in 'load' does not represent the 'version' sent
+    # in the top level of the transport's message.
+    load = {
+        "id": target,
+        "grains": {},
+        "saltenv": "base",
+        "pillarenv": "base",
+        "pillar_override": True,
+        "extra_minion_data": {},
+        "ver": "2",
+        "cmd": "_pillar",
+    }
+    ret = await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
+        load,
+        dictkey="pillar",
+    )
+    assert "version" in client.transport.msg
+    assert client.transport.msg["version"] == 2
+    assert ret == {"pillar1": "meh"}
+
+
+async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(
+    pki_dir, minion_opts, master_opts
+):
+    mockloop = MagicMock()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop)
+
+    dictkey = "pillar"
+    badnonce = "abcdefg"
+    target = "minion"
+    pillar_data = {"pillar1": "meh"}
+
+    # Mock auth and message client.
+    auth = client.auth
+    auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY)
+    client.auth = MagicMock()
+    client.auth.mpub = auth.mpub
+    client.auth.authenticated = True
+    client.auth.get_keys = auth.get_keys
+    client.auth.crypticle.dumps = auth.crypticle.dumps
+    client.auth.crypticle.loads = auth.crypticle.loads
+    client.transport = MagicMock()
+    ret = server._encrypt_private(
+        pillar_data,
+        dictkey,
+        target,
+        nonce=badnonce,
+        sign_messages=True,
+        encryption_algorithm=minion_opts["encryption_algorithm"],
+        signing_algorithm=minion_opts["signing_algorithm"],
+    )
+
+    @tornado.gen.coroutine
+    def mocksend(msg, timeout=60, tries=3):
+        client.transport.msg = msg
+        raise tornado.gen.Return(ret)
+
+    client.transport.send = mocksend
+
+    # Note the 'ver' value in 'load' does not represent the 'version' sent
+    # in the top level of the transport's message.
+    load = {
+        "id": target,
+        "grains": {},
+        "saltenv": "base",
+        "pillarenv": "base",
+        "pillar_override": True,
+        "extra_minion_data": {},
+        "ver": "2",
+        "cmd": "_pillar",
+    }
+
+    with pytest.raises(salt.crypt.AuthenticationError) as excinfo:
+        ret = await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
+            load,
+            dictkey="pillar",
+        )
+    assert "Pillar nonce verification failed." == excinfo.value.message
+
+
+async def test_req_chan_decode_data_dict_entry_v2_bad_signature(
+    pki_dir, minion_opts, master_opts
+):
+    mockloop = MagicMock()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop)
+
+    dictkey = "pillar"
+    badnonce = "abcdefg"
+    target = "minion"
+    pillar_data = {"pillar1": "meh"}
+
+    # Mock auth and message client.
+    auth = client.auth
+    auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY)
+    client.auth = MagicMock()
+    client.auth.mpub = auth.mpub
+    client.auth.authenticated = True
+    client.auth.get_keys = auth.get_keys
+    client.auth.crypticle.dumps = auth.crypticle.dumps
+    client.auth.crypticle.loads = auth.crypticle.loads
+    client.transport = MagicMock()
+
+    @tornado.gen.coroutine
+    def mocksend(msg, timeout=60, tries=3):
+        client.transport.msg = msg
+        load = client.auth.crypticle.loads(msg["load"])
+        ret = server._encrypt_private(
+            pillar_data,
+            dictkey,
+            target,
+            nonce=load["nonce"],
+            sign_messages=True,
+            encryption_algorithm=minion_opts["encryption_algorithm"],
+            signing_algorithm=minion_opts["signing_algorithm"],
+        )
+
+        key = client.auth.get_keys()
+        aes = key.decrypt(ret["key"], minion_opts["encryption_algorithm"])
+        pcrypt = salt.crypt.Crypticle(client.opts, aes)
+        signed_msg = pcrypt.loads(ret[dictkey])
+        # Changing the pillar data will cause the signature verification to
+        # fail.
+        data = salt.payload.loads(signed_msg["data"])
+        data["pillar"] = {"pillar1": "bar"}
+        signed_msg["data"] = salt.payload.dumps(data)
+        ret[dictkey] = pcrypt.dumps(signed_msg)
+        raise tornado.gen.Return(ret)
+
+    client.transport.send = mocksend
+
+    # Note the 'ver' value in 'load' does not represent the 'version' sent
+    # in the top level of the transport's message.
+    load = {
+        "id": target,
+        "grains": {},
+        "saltenv": "base",
+        "pillarenv": "base",
+        "pillar_override": True,
+        "extra_minion_data": {},
+        "ver": "2",
+        "cmd": "_pillar",
+    }
+
+    with pytest.raises(salt.crypt.AuthenticationError) as excinfo:
+        ret = await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
+            load,
+            dictkey="pillar",
+        )
+    assert "Pillar payload signature failed to validate." == excinfo.value.message
+
+
+async def test_req_chan_decode_data_dict_entry_v2_bad_key(
+    pki_dir, minion_opts, master_opts
+):
+    mockloop = MagicMock()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop)
+
+    dictkey = "pillar"
+    badnonce = "abcdefg"
+    target = "minion"
+    pillar_data = {"pillar1": "meh"}
+
+    # Mock auth and message client.
+    auth = client.auth
+    auth._crypticle = salt.crypt.Crypticle(master_opts, AES_KEY)
+    client.auth = MagicMock()
+    client.auth.mpub = auth.mpub
+    client.auth.authenticated = True
+    client.auth.get_keys = auth.get_keys
+    client.auth.crypticle.dumps = auth.crypticle.dumps
+    client.auth.crypticle.loads = auth.crypticle.loads
+    client.transport = MagicMock()
+
+    @tornado.gen.coroutine
+    def mocksend(msg, timeout=60, tries=3):
+        client.transport.msg = msg
+        load = client.auth.crypticle.loads(msg["load"])
+        ret = server._encrypt_private(
+            pillar_data,
+            dictkey,
+            target,
+            nonce=load["nonce"],
+            sign_messages=True,
+            encryption_algorithm=minion_opts["encryption_algorithm"],
+            signing_algorithm=minion_opts["signing_algorithm"],
+        )
+
+        mkey = client.auth.get_keys()
+        aes = mkey.decrypt(ret["key"], minion_opts["encryption_algorithm"])
+        pcrypt = salt.crypt.Crypticle(client.opts, aes)
+        signed_msg = pcrypt.loads(ret[dictkey])
+
+        # Now encrypt with a different key
+        key = salt.crypt.Crypticle.generate_key_string()
+        pcrypt = salt.crypt.Crypticle(master_opts, key)
+        pubfn = os.path.join(master_opts["pki_dir"], "minions", "minion")
+        pub = salt.crypt.PublicKey(pubfn)
+        ret[dictkey] = pcrypt.dumps(signed_msg)
+        key = salt.utils.stringutils.to_bytes(key)
+        ret["key"] = pub.encrypt(key, minion_opts["encryption_algorithm"])
+        raise tornado.gen.Return(ret)
+
+    client.transport.send = mocksend
+
+    # Note the 'ver' value in 'load' does not represent the 'version' sent
+    # in the top level of the transport's message.
+    load = {
+        "id": target,
+        "grains": {},
+        "saltenv": "base",
+        "pillarenv": "base",
+        "pillar_override": True,
+        "extra_minion_data": {},
+        "ver": "2",
+        "cmd": "_pillar",
+    }
+    try:
+        with pytest.raises(salt.crypt.AuthenticationError) as excinfo:
+            await client.crypted_transfer_decode_dictentry(  # pylint: disable=E1121,E1123
+                load,
+                dictkey="pillar",
+            )
+        assert "Key verification failed." == excinfo.value.message
+    finally:
+        client.close()
+        server.close()
+
+
+async def test_req_serv_auth_v1(pki_dir, minion_opts, master_opts):
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "master_sign_pubkey": False,
+            "publish_port": 4505,
+            "auth_mode": 1,
+        }
+    )
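+    # Seed the shared AES session secret the way the master process does at
+    # startup.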
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+
+    pub = salt.crypt.get_rsa_pub_key(str(pki_dir.joinpath("minion", "minion.pub")))
+    token = salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string())
+    nonce = uuid.uuid4().hex
+
+    # We need to read the public key with fopen otherwise the newlines might
+    # not match on Windows.
+    with salt.utils.files.fopen(
+        str(pki_dir.joinpath("minion", "minion.pub")), "r"
+    ) as fp:
+        pub_key = salt.crypt.clean_key(fp.read())
+
+    load = {
+        "cmd": "_auth",
+        "id": "minion",
+        "token": token,
+        "pub": pub_key,
+        "enc_algo": minion_opts["encryption_algorithm"],
+        "sig_algo": minion_opts["signing_algorithm"],
+    }
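+    # With sign_messages=False the v1 reply comes back unsigned, so there is no
+    # wrapped 'load' in it.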
+    ret = server._auth(load, sign_messages=False)
+    assert "load" not in ret
+
+
+async def test_req_serv_auth_v2(pki_dir, minion_opts, master_opts):
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "master_sign_pubkey": False,
+            "publish_port": 4505,
+            "auth_mode": 1,
+        }
+    )
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+
+    pub = salt.crypt.get_rsa_pub_key(str(pki_dir.joinpath("minion", "minion.pub")))
+    token = salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string())
+    nonce = uuid.uuid4().hex
+
+    # We need to read the public key with fopen otherwise the newlines might
+    # not match on Windows.
+    with salt.utils.files.fopen(
+        str(pki_dir.joinpath("minion", "minion.pub")), "r"
+    ) as fp:
+        pub_key = fp.read()
+
+    load = {
+        "cmd": "_auth",
+        "id": "minion",
+        "nonce": nonce,
+        "token": token,
+        "pub": pub_key,
+        "enc_algo": minion_opts["encryption_algorithm"],
+        "sig_algo": minion_opts["signing_algorithm"],
+    }
+    ret = server._auth(load, sign_messages=True)
+    assert "sig" in ret
+    assert "load" in ret
+
+
+async def test_req_chan_auth_v2(pki_dir, io_loop, minion_opts, master_opts):
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "publish_port": 4505,
+            "auth_mode": 1,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    master_opts["master_sign_pubkey"] = False
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+    minion_opts["verify_master_pubkey_sign"] = False
+    minion_opts["always_verify_signature"] = False
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop)
+    signin_payload = client.auth.minion_sign_in_payload()
+    pload = client._package_load(signin_payload)
+    assert "version" in pload
+    assert pload["version"] == 2
+
+    ret = server._auth(pload["load"], sign_messages=True)
+    assert "sig" in ret
+    ret = client.auth.handle_signin_response(signin_payload, ret)
+    assert "aes" in ret
+    assert "master_uri" in ret
+    assert "publish_port" in ret
+
+
+async def test_req_chan_auth_v2_with_master_signing(
+    pki_dir, io_loop, minion_opts, master_opts
+):
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "publish_port": 4505,
+            "auth_mode": 1,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master")))
+    master_opts["master_sign_pubkey"] = True
+    master_opts["master_use_pubkey_signature"] = False
+    master_opts["signing_key_pass"] = ""
+    master_opts["master_sign_key_name"] = "master_sign"
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+    minion_opts["verify_master_pubkey_sign"] = True
+    minion_opts["always_verify_signature"] = True
+    minion_opts["master_sign_key_name"] = "master_sign"
+    minion_opts["master"] = "master"
+
+    assert (
+        pki_dir.joinpath("minion", "minion_master.pub").read_text()
+        == pki_dir.joinpath("master", "master.pub").read_text()
+    )
+
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop)
+    signin_payload = client.auth.minion_sign_in_payload()
+    pload = client._package_load(signin_payload)
+    assert "version" in pload
+    assert pload["version"] == 2
+
+    server_reply = server._auth(pload["load"], sign_messages=True)
+    # With version 2 we always get a clear signed response
+    assert "enc" in server_reply
+    assert server_reply["enc"] == "clear"
+    assert "sig" in server_reply
+    assert "load" in server_reply
+    ret = client.auth.handle_signin_response(signin_payload, server_reply)
+    assert "aes" in ret
+    assert "master_uri" in ret
+    assert "publish_port" in ret
+
+    # Now create a new master key pair and try auth with it.
+    mapriv = pki_dir.joinpath("master", "master.pem")
+    mapriv.unlink()
+    mapriv.write_text(MASTER2_PRIV_KEY.strip())
+    mapub = pki_dir.joinpath("master", "master.pub")
+    mapub.unlink()
+    mapub.write_text(MASTER2_PUB_KEY.strip())
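+    # The minion still caches the old master.pub; because the rotated key is
+    # signed by master_sign, the minion accepts it and updates its cached copy.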
+
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+
+    signin_payload = client.auth.minion_sign_in_payload()
+    pload = client._package_load(signin_payload)
+    server_reply = server._auth(pload["load"], sign_messages=True)
+    ret = client.auth.handle_signin_response(signin_payload, server_reply)
+
+    assert "aes" in ret
+    assert "master_uri" in ret
+    assert "publish_port" in ret
+
+    assert (
+        pki_dir.joinpath("minion", "minion_master.pub").read_text()
+        == pki_dir.joinpath("master", "master.pub").read_text()
+    )
+
+
+async def test_req_chan_auth_v2_new_minion_with_master_pub(
+    pki_dir, io_loop, minion_opts, master_opts
+):
+
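+    # Remove the minion's accepted key so the master treats this as a new,
+    # pending minion.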
+    pki_dir.joinpath("master", "minions", "minion").unlink()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "publish_port": 4505,
+            "auth_mode": 1,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    master_opts["master_sign_pubkey"] = False
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+    minion_opts["verify_master_pubkey_sign"] = False
+    minion_opts["always_verify_signature"] = False
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop)
+    signin_payload = client.auth.minion_sign_in_payload()
+    pload = client._package_load(signin_payload)
+    assert "version" in pload
+    assert pload["version"] == 2
+
+    ret = server._auth(pload["load"], sign_messages=True)
+    assert "sig" in ret
+    ret = client.auth.handle_signin_response(signin_payload, ret)
+    assert ret == "retry"
+
+
+async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig(
+    pki_dir, io_loop, minion_opts, master_opts
+):
+
+    pki_dir.joinpath("master", "minions", "minion").unlink()
+
+    # Give the master a different key than the minion has.
+    mapriv = pki_dir.joinpath("master", "master.pem")
+    mapriv.unlink()
+    mapriv.write_text(MASTER2_PRIV_KEY.strip())
+    mapub = pki_dir.joinpath("master", "master.pub")
+    mapub.unlink()
+    mapub.write_text(MASTER2_PUB_KEY.strip())
+
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "publish_port": 4505,
+            "auth_mode": 1,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts.update(
+        pki_dir=str(pki_dir.joinpath("master")), master_sign_pubkey=False
+    )
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+    minion_opts["verify_master_pubkey_sign"] = False
+    minion_opts["always_verify_signature"] = False
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop)
+    signin_payload = client.auth.minion_sign_in_payload()
+    pload = client._package_load(signin_payload)
+    assert "version" in pload
+    assert pload["version"] == 2
+
+    ret = server._auth(pload["load"], sign_messages=True)
+    assert "sig" in ret
+    with pytest.raises(salt.crypt.SaltClientError, match="Invalid signature"):
+        ret = client.auth.handle_signin_response(signin_payload, ret)
+
+
+async def test_req_chan_auth_v2_new_minion_without_master_pub(
+    minion_opts,
+    master_opts,
+    pki_dir,
+    io_loop,
+):
+
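+    # Remove both the accepted minion key and the minion's cached master pubkey
+    # to mimic a first-time minion.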
+    pki_dir.joinpath("master", "minions", "minion").unlink()
+    pki_dir.joinpath("minion", "minion_master.pub").unlink()
+    minion_opts.update(
+        {
+            "master_uri": "tcp://127.0.0.1:4506",
+            "interface": "127.0.0.1",
+            "ret_port": 4506,
+            "ipv6": False,
+            "sock_dir": ".",
+            "pki_dir": str(pki_dir.joinpath("minion")),
+            "id": "minion",
+            "__role": "minion",
+            "keysize": 4096,
+            "max_minions": 0,
+            "auto_accept": False,
+            "open_mode": False,
+            "key_pass": None,
+            "publish_port": 4505,
+            "auth_mode": 1,
+            "acceptance_wait_time": 3,
+            "acceptance_wait_time_max": 3,
+        }
+    )
+    SMaster.secrets["aes"] = {
+        "secret": multiprocessing.Array(
+            ctypes.c_char,
+            salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+        ),
+        "reload": salt.crypt.Crypticle.generate_key_string,
+    }
+    master_opts.update(pki_dir=str(pki_dir.joinpath("master")))
+    master_opts["master_sign_pubkey"] = False
+    server = salt.channel.server.ReqServerChannel.factory(master_opts)
+    server.auto_key = salt.daemons.masterapi.AutoKey(server.opts)
+    server.cache_cli = False
+    server.event = salt.utils.event.get_master_event(
+        master_opts, master_opts["sock_dir"], listen=False
+    )
+    server.master_key = salt.crypt.MasterKeys(server.opts)
+    minion_opts["verify_master_pubkey_sign"] = False
+    minion_opts["always_verify_signature"] = False
+    client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop)
+    signin_payload = client.auth.minion_sign_in_payload()
+    pload = client._package_load(signin_payload)
+    try:
+        assert "version" in pload
+        assert pload["version"] == 2
+
+        ret = server._auth(pload["load"], sign_messages=True)
+        assert "sig" in ret
+        ret = client.auth.handle_signin_response(signin_payload, ret)
+        assert ret == "retry"
+    finally:
+        client.close()
+        server.close()
+
+
 async def test_req_server_garbage_request(io_loop):
     """
     Validate invalid msgpack messages will not raise exceptions in the
diff --git a/tests/pytests/unit/utils/test_atomicfile.py b/tests/pytests/unit/utils/test_atomicfile.py
new file mode 100644
index 00000000000..06dfd9a5b68
--- /dev/null
+++ b/tests/pytests/unit/utils/test_atomicfile.py
@@ -0,0 +1,27 @@
+"""
+Tests for atomicfile utility module.
+"""
+
+import pytest
+
+import salt.utils.files
+from salt.utils.atomicfile import atomic_open
+
+
+@pytest.mark.skip_on_windows(reason="Not a Windows test")
+def test_atomicfile_respects_umask(tmp_path):
+    """
+    Test that creating a file using atomic_open respects the umask, instead of
+    creating the file with 0600 perms.
+    """
+    new_file = tmp_path / "foo"
+    contents = "bar"
+
+    # Set the umask specifically for this test so that we know what the mode of
+    # the created file should be.
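+    # With a umask of 0o022 the default 0o666 create mode becomes 0o644
+    # (0o666 & ~0o022).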
+    with salt.utils.files.set_umask(0o022):
+        with atomic_open(str(new_file), "w") as fh_:
+            fh_.write(contents)
+
+    assert new_file.read_text() == contents
+    assert oct(new_file.stat().st_mode)[-3:] == "644"
diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py
index 4b0c11afd60..03e4398dd98 100644
--- a/tests/pytests/unit/utils/test_gitfs.py
+++ b/tests/pytests/unit/utils/test_gitfs.py
@@ -23,6 +23,14 @@ except AttributeError:
 if HAS_PYGIT2:
     import pygit2
 
+    try:
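+        # Newer pygit2 releases expose object type constants via pygit2.enums;
+        # older releases only provide the legacy GIT_OBJ_* attributes.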
+        from pygit2.enums import ObjectType
+
+        HAS_PYGIT2_ENUMS = True
+
+    except ModuleNotFoundError:
+        HAS_PYGIT2_ENUMS = False
+
 
 @pytest.fixture
 def minion_opts(tmp_path):
@@ -147,9 +155,14 @@ def _prepare_remote_repository_pygit2(tmp_path):
         tree,
         [repository.head.target],
     )
-    repository.create_tag(
-        "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
-    )
+    if HAS_PYGIT2_ENUMS:
+        repository.create_tag(
+            "annotated_tag", commit, ObjectType.COMMIT, signature, "some message"
+        )
+    else:
+        repository.create_tag(
+            "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+        )
     return remote
 
 
diff --git a/tests/pytests/unit/utils/test_msgpack.py b/tests/pytests/unit/utils/test_msgpack.py
index e15da262b00..feebcf1f88d 100644
--- a/tests/pytests/unit/utils/test_msgpack.py
+++ b/tests/pytests/unit/utils/test_msgpack.py
@@ -4,6 +4,10 @@ import salt.utils.msgpack
 from tests.support.mock import MagicMock, patch
 
 
+@pytest.mark.skipif(
+    salt.utils.msgpack.version < (1, 0, 0),
+    reason="Test requires msgpack version >= 1.0.0",
+)
 def test_load_encoding(tmp_path):
     """
     test when using msgpack version >= 1.0.0 we
diff --git a/tests/pytests/unit/utils/test_nacl.py b/tests/pytests/unit/utils/test_nacl.py
index 5c60d880b2f..91be6855487 100644
--- a/tests/pytests/unit/utils/test_nacl.py
+++ b/tests/pytests/unit/utils/test_nacl.py
@@ -73,6 +73,20 @@ def test_keygen_keyfile(test_keygen):
 
         ret = nacl.keygen(keyfile=fpath)
         assert f"saved pk_file: {fpath}.pub" == ret
+        with salt.utils.files.fopen(str(fpath) + ".pub", "rb") as rfh:
+            assert test_keygen["pk"] == rfh.read()
+        salt.utils.files.remove(str(fpath) + ".pub")
+
+
+def test_keygen_nonexistent_sk_file():
+    """
+    test nacl.keygen function
+    with nonexistent/new sk_file
+    """
+    with pytest.helpers.temp_file("test_keygen_sk_file") as fpath:
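+        # Delete the temp file first so keygen has to create a brand-new sk_file.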
+        salt.utils.files.remove(str(fpath))
+        ret = nacl.keygen(sk_file=str(fpath))
+        assert f"saved sk_file:{fpath}  pk_file: {fpath}.pub" == ret
         salt.utils.files.remove(str(fpath) + ".pub")
 
 
diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py
index 12d545b0154..d3d9744fb45 100644
--- a/tests/pytests/unit/utils/test_network.py
+++ b/tests/pytests/unit/utils/test_network.py
@@ -7,11 +7,12 @@ import pytest
 import salt.exceptions
 import salt.utils.network
 import salt.utils.network as network
+import salt.utils.platform
 from salt._compat import ipaddress
 from tests.support.mock import MagicMock, create_autospec, mock_open, patch
 
 pytestmark = [
-    pytest.mark.skip_on_windows,
+    pytest.mark.windows_whitelisted,
 ]
 
 
@@ -722,13 +723,13 @@ def test_netlink_tool_remote_on_a():
         with patch("salt.utils.platform.is_linux", return_value=True):
             with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT):
                 remotes = network._netlink_tool_remote_on("4506", "local_port")
-                assert remotes == {"192.168.122.177", "::ffff:127.0.0.1"}
+                assert remotes == {"192.168.122.177", "127.0.0.1"}
 
 
 def test_netlink_tool_remote_on_b():
     with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT):
         remotes = network._netlink_tool_remote_on("4505", "remote_port")
-        assert remotes == {"127.0.0.1", "::ffff:1.2.3.4"}
+        assert remotes == {"127.0.0.1", "1.2.3.4"}
 
 
 def test_openbsd_remotes_on():
@@ -1430,8 +1431,17 @@ def test_isportopen_false():
     assert ret is False
 
 
-def test_isportopen():
-    ret = network.isportopen("127.0.0.1", "22")
+@pytest.fixture
+def openport_22233():
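+    # Hold a listening socket on a fixed port so test_isportopen has a real
+    # open port to probe.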
+    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    s.bind(("0.0.0.0", 22233))
+    s.listen(5)
+    yield
+    s.close()
+
+
+def test_isportopen(openport_22233):
+    ret = network.isportopen("127.0.0.1", 22233)
     assert ret == 0
 
 
@@ -1445,38 +1455,44 @@ def test_get_socket():
     assert ret.type == socket.SOCK_STREAM
 
 
 def test_ip_to_host(grains):
-    ret = network.ip_to_host("127.0.0.1")
-    if grains["oscodename"] == "Photon":
-        # Photon returns this for IPv4
-        assert ret == "ipv6-localhost"
+    if salt.utils.platform.is_windows():
+        hostname = socket.gethostname()
     else:
-        assert ret == "localhost"
+        hostname = "localhost"
+
+    ret = network.ip_to_host("127.0.0.1")
+    if grains.get("oscodename") == "Photon":
+        # Photon returns this for IPv4
+        assert ret in ("ipv6-localhost", "localhost")
+    else:
+        assert ret == hostname
 
     ret = network.ip_to_host("2001:a71::1")
     assert ret is None
 
     ret = network.ip_to_host("::1")
     if grains["os"] == "Amazon":
-        assert ret == "localhost6"
+        assert ret in ("localhost6", "localhost")
     elif grains["os_family"] == "Debian":
         if grains["osmajorrelease"] == 12:
-            assert ret == "localhost"
+            assert ret == hostname
         else:
-            assert ret == "ip6-localhost"
+            assert ret in ("ip6-localhost", "localhost")
     elif grains["os_family"] == "RedHat":
         if grains["oscodename"] == "Photon":
-            assert ret == "ipv6-localhost"
+            assert ret in ("ipv6-localhost", "localhost")
         else:
-            assert ret == "localhost"
+            assert ret == hostname
     elif grains["os_family"] == "Arch":
         if grains.get("osmajorrelease", None) is None:
             # running doesn't have osmajorrelease grains
-            assert ret == "localhost"
+            assert ret == hostname
         else:
-            assert ret == "ip6-localhost"
+            assert ret in ("ip6-localhost", "localhost")
     else:
-        assert ret == "localhost"
+        assert ret == hostname
 
 
 @pytest.mark.parametrize(
@@ -1509,7 +1525,7 @@ def test_rpad_ipv4_network(addr, expected):
 def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict):
 
     with patch(
-        "salt.utils.network.linux_interfaces",
+        "salt.utils.network.interfaces",
         MagicMock(return_value=linux_interfaces_dict),
     ):
         hw_addrs = network.hw_addr("eth0")
@@ -1534,7 +1550,7 @@ def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict):
 def test_interface_and_ip(linux_interfaces_dict):
 
     with patch(
-        "salt.utils.network.linux_interfaces",
+        "salt.utils.network.interfaces",
         MagicMock(return_value=linux_interfaces_dict),
     ):
         expected = [
@@ -1560,7 +1576,7 @@ def test_interface_and_ip(linux_interfaces_dict):
 def test_subnets(linux_interfaces_dict):
 
     with patch(
-        "salt.utils.network.linux_interfaces",
+        "salt.utils.network.interfaces",
         MagicMock(return_value=linux_interfaces_dict),
     ):
         ret = network.subnets()
@@ -1583,14 +1599,14 @@ def test_in_subnet(caplog):
 
 def test_ip_addrs(linux_interfaces_dict):
     with patch(
-        "salt.utils.network.linux_interfaces",
+        "salt.utils.network.interfaces",
         MagicMock(return_value=linux_interfaces_dict),
     ):
         ret = network.ip_addrs("eth0")
         assert ret == ["10.10.10.56"]
 
     with patch(
-        "salt.utils.network.linux_interfaces",
+        "salt.utils.network.interfaces",
         MagicMock(return_value=linux_interfaces_dict),
     ):
         ret = network.ip_addrs6("eth0")
diff --git a/tests/pytests/unit/utils/test_pycrypto.py b/tests/pytests/unit/utils/test_pycrypto.py
index 1dfcf9621c4..9dff08f883e 100644
--- a/tests/pytests/unit/utils/test_pycrypto.py
+++ b/tests/pytests/unit/utils/test_pycrypto.py
@@ -57,21 +57,20 @@ def test_gen_hash_crypt(algorithm, expected):
     """
     Test gen_hash with crypt library
     """
-    with patch("salt.utils.pycrypto.methods", {}):
-        ret = salt.utils.pycrypto.gen_hash(
-            crypt_salt=expected["salt"], password=passwd, algorithm=algorithm
-        )
-        assert ret == expected["hashed"]
+    ret = salt.utils.pycrypto.gen_hash(
+        crypt_salt=expected["salt"], password=passwd, algorithm=algorithm
+    )
+    assert ret == expected["hashed"]
 
-        ret = salt.utils.pycrypto.gen_hash(
-            crypt_salt=expected["badsalt"], password=passwd, algorithm=algorithm
-        )
-        assert ret != expected["hashed"]
+    ret = salt.utils.pycrypto.gen_hash(
+        crypt_salt=expected["badsalt"], password=passwd, algorithm=algorithm
+    )
+    assert ret != expected["hashed"]
 
-        ret = salt.utils.pycrypto.gen_hash(
-            crypt_salt=None, password=passwd, algorithm=algorithm
-        )
-        assert ret != expected["hashed"]
+    ret = salt.utils.pycrypto.gen_hash(
+        crypt_salt=None, password=passwd, algorithm=algorithm
+    )
+    assert ret != expected["hashed"]
 
 
 @pytest.mark.skipif(not salt.utils.pycrypto.HAS_CRYPT, reason="crypt not available")
diff --git a/tests/pytests/unit/utils/test_win_functions.py b/tests/pytests/unit/utils/test_win_functions.py
index 4f1e8b39d56..6fcb1081d38 100644
--- a/tests/pytests/unit/utils/test_win_functions.py
+++ b/tests/pytests/unit/utils/test_win_functions.py
@@ -1,7 +1,9 @@
 import platform
 
 import pytest
+from saltfactories.utils import random_string
 
+import salt.modules.win_useradd
 import salt.utils.win_functions as win_functions
 from tests.support.mock import MagicMock, patch
 
@@ -36,6 +38,14 @@ except ImportError:
     HAS_PYWIN = False
 
 
+@pytest.fixture(scope="module")
+def test_user():
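+    # Create a throwaway local user for the SAM name lookup; it is removed once
+    # the module's tests finish.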
+    user_name = random_string("test-")
+    salt.modules.win_useradd.add(name=user_name, password="P@ssw0rd")
+    yield user_name
+    salt.modules.win_useradd.delete(name=user_name)
+
+
 # Test cases for salt.utils.win_functions.
 
 
@@ -176,7 +186,7 @@ def test_get_sam_name_everyone():
 
 
 @pytest.mark.skipif(not HAS_PYWIN, reason="Requires pywintypes libraries")
-def test_get_sam_name():
-    expected = "\\".join([platform.node()[:15], "Administrator"])
-    result = win_functions.get_sam_name("Administrator")
-    assert result == expected
+def test_get_sam_name(test_user):
+    expected = "\\".join([platform.node()[:15], test_user])
+    result = win_functions.get_sam_name(test_user)
+    assert result.lower() == expected.lower()
diff --git a/tests/pytests/unit/utils/test_win_update.py b/tests/pytests/unit/utils/test_win_update.py
index a221ee31952..9939428c7ca 100644
--- a/tests/pytests/unit/utils/test_win_update.py
+++ b/tests/pytests/unit/utils/test_win_update.py
@@ -1,14 +1,52 @@
 import pytest
 
+try:
+    import win32com.client
+
+    HAS_WIN32 = True
+except ImportError:
+    HAS_WIN32 = False
+
 import salt.utils.win_update as win_update
 from tests.support.mock import MagicMock, patch
 
 pytestmark = [
     pytest.mark.windows_whitelisted,
     pytest.mark.skip_unless_on_windows,
+    pytest.mark.skipif(not HAS_WIN32, reason="Requires Win32 libraries"),
 ]
 
 
+def test_available_no_updates():
+    """
+    Test available when there are no updates on the system
+    """
+    with patch("salt.utils.winapi.Com", autospec=True), patch(
+        "win32com.client.Dispatch", autospec=True
+    ), patch.object(win_update.WindowsUpdateAgent, "refresh", autospec=True):
+        wua = win_update.WindowsUpdateAgent(online=False)
+        wua._updates = []
+
+        available_updates = wua.available()
+
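+        # Nothing was cached, so no update objects should have been added to
+        # the returned collection.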
+        assert available_updates.updates.Add.call_count == 0
+
+
+def test_available_no_updates_empty_objects():
+    """
+    Test available when the updates on the system are empty objects
+    """
+    with patch("salt.utils.winapi.Com", autospec=True), patch(
+        "win32com.client.Dispatch", autospec=True
+    ), patch.object(win_update.WindowsUpdateAgent, "refresh", autospec=True):
+        wua = win_update.WindowsUpdateAgent(online=False)
+        wua._updates = [win32com.client.CDispatch, win32com.client.CDispatch]
+
+        available_updates = wua.available()
+
+        assert available_updates.updates.Add.call_count == 0
+
+
 def test_installed_no_updates():
     """
     Test installed when there are no updates on the system
diff --git a/tests/pytests/unit/utils/test_yamldumper.py b/tests/pytests/unit/utils/test_yamldumper.py
new file mode 100644
index 00000000000..09a1106f545
--- /dev/null
+++ b/tests/pytests/unit/utils/test_yamldumper.py
@@ -0,0 +1,123 @@
+"""
+    Unit tests for salt.utils.yamldumper
+"""
+
+from collections import OrderedDict, defaultdict
+
+import salt.utils.yamldumper
+from salt.utils.context import NamespacedDictWrapper
+from salt.utils.odict import HashableOrderedDict
+
+
+def test_yaml_dump():
+    """
+    Test yaml.dump a dict
+    """
+    data = {"foo": "bar"}
+    exp_yaml = "{foo: bar}\n"
+
+    assert salt.utils.yamldumper.dump(data) == exp_yaml
+
+    assert salt.utils.yamldumper.dump(
+        data, default_flow_style=False
+    ) == exp_yaml.replace("{", "").replace("}", "")
+
+
+def test_yaml_safe_dump():
+    """
+    Test yaml.safe_dump a dict
+    """
+    data = {"foo": "bar"}
+    assert salt.utils.yamldumper.safe_dump(data) == "{foo: bar}\n"
+
+    assert (
+        salt.utils.yamldumper.safe_dump(data, default_flow_style=False) == "foo: bar\n"
+    )
+
+
+def test_yaml_ordered_dump():
+    """
+    Test yaml.dump with OrderedDict
+    """
+    data = OrderedDict([("foo", "bar"), ("baz", "qux")])
+    exp_yaml = "{foo: bar, baz: qux}\n"
+    assert (
+        salt.utils.yamldumper.dump(data, Dumper=salt.utils.yamldumper.OrderedDumper)
+        == exp_yaml
+    )
+
+
+def test_yaml_safe_ordered_dump():
+    """
+    Test yaml.safe_dump with OrderedDict
+    """
+    data = OrderedDict([("foo", "bar"), ("baz", "qux")])
+    exp_yaml = "{foo: bar, baz: qux}\n"
+    assert salt.utils.yamldumper.safe_dump(data) == exp_yaml
+
+
+def test_yaml_indent_safe_ordered_dump():
+    """
+    Test yaml.dump with IndentedSafeOrderedDumper
+    """
+    data = OrderedDict([("foo", ["bar", "baz"]), ("qux", "quux")])
+    # Account for difference in SafeDumper vs CSafeDumper
+    if salt.utils.yamldumper.SafeDumper.__name__ == "SafeDumper":
+        exp_yaml = "foo:\n  - bar\n  - baz\nqux: quux\n"
+    else:
+        exp_yaml = "foo:\n- bar\n- baz\nqux: quux\n"
+    assert (
+        salt.utils.yamldumper.dump(
+            data,
+            Dumper=salt.utils.yamldumper.IndentedSafeOrderedDumper,
+            default_flow_style=False,
+        )
+        == exp_yaml
+    )
+
+
+def test_yaml_defaultdict_dump():
+    """
+    Test yaml.dump with defaultdict
+    """
+    data = defaultdict(list)
+    data["foo"].append("bar")
+    exp_yaml = "foo: [bar]\n"
+    assert salt.utils.yamldumper.safe_dump(data) == exp_yaml
+
+
+def test_yaml_namespaced_dict_wrapper_dump():
+    """
+    Test yaml.dump with NamespacedDictWrapper
+    """
+    data = NamespacedDictWrapper({"test": {"foo": "bar"}}, "test")
+    exp_yaml = (
+        "!!python/object/new:salt.utils.context.NamespacedDictWrapper\n"
+        "dictitems: {foo: bar}\n"
+        "state:\n"
+        "  _NamespacedDictWrapper__dict:\n"
+        "    test: {foo: bar}\n"
+        "  pre_keys: !!python/tuple [test]\n"
+    )
+    assert salt.utils.yamldumper.dump(data) == exp_yaml
+
+
+def test_yaml_undefined_dump():
+    """
+    Test yaml.safe_dump with None
+    """
+    data = {"foo": None}
+    exp_yaml = "{foo: null}\n"
+    assert salt.utils.yamldumper.safe_dump(data) == exp_yaml
+
+
+def test_yaml_hashable_ordered_dict_dump():
+    """
+    Test yaml.dump with HashableOrderedDict
+    """
+    data = HashableOrderedDict([("foo", "bar"), ("baz", "qux")])
+    exp_yaml = "{foo: bar, baz: qux}\n"
+    assert (
+        salt.utils.yamldumper.dump(data, Dumper=salt.utils.yamldumper.OrderedDumper)
+        == exp_yaml
+    )
diff --git a/tests/pytests/unit/utils/verify/test_clean_path.py b/tests/pytests/unit/utils/verify/test_clean_path.py
index 062821eb796..9899cbde076 100644
--- a/tests/pytests/unit/utils/verify/test_clean_path.py
+++ b/tests/pytests/unit/utils/verify/test_clean_path.py
@@ -3,6 +3,7 @@ salt.utils.clean_path works as expected
 """
 
 import salt.utils.verify
+from tests.support.mock import patch
 
 
 def test_clean_path_valid(tmp_path):
@@ -15,3 +16,10 @@ def test_clean_path_invalid(tmp_path):
     path_a = str(tmp_path / "foo")
     path_b = str(tmp_path / "baz" / "bar")
     assert salt.utils.verify.clean_path(path_a, path_b) == ""
+
+
+def test_clean_path_relative_root(tmp_path):
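+    # With the cwd patched to tmp_path, the relative root "foo" resolves
+    # inside it.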
+    with patch("os.getcwd", return_value=str(tmp_path)):
+        path_a = "foo"
+        path_b = str(tmp_path / "foo" / "bar")
+        assert salt.utils.verify.clean_path(path_a, path_b) == path_b
diff --git a/tests/pytests/unit/utils/win_lgpo/test_netsh.py b/tests/pytests/unit/utils/win_lgpo/test_netsh.py
index 4f74e1dc1c6..814ca05d364 100644
--- a/tests/pytests/unit/utils/win_lgpo/test_netsh.py
+++ b/tests/pytests/unit/utils/win_lgpo/test_netsh.py
@@ -9,72 +9,42 @@ pytestmark = [
 ]
 
 
-def test_get_settings_firewallpolicy_local():
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+def test_get_settings_firewallpolicy(store):
     ret = win_lgpo_netsh.get_settings(
-        profile="domain", section="firewallpolicy", store="local"
+        profile="domain", section="firewallpolicy", store=store
     )
     assert "Inbound" in ret
     assert "Outbound" in ret
 
 
-def test_get_settings_firewallpolicy_lgpo():
-    ret = win_lgpo_netsh.get_settings(
-        profile="domain", section="firewallpolicy", store="lgpo"
-    )
-    assert "Inbound" in ret
-    assert "Outbound" in ret
-
-
-def test_get_settings_logging_local():
-    ret = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="local"
-    )
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+def test_get_settings_logging(store):
+    ret = win_lgpo_netsh.get_settings(profile="domain", section="logging", store=store)
     assert "FileName" in ret
     assert "LogAllowedConnections" in ret
     assert "LogDroppedConnections" in ret
     assert "MaxFileSize" in ret
 
 
-def test_get_settings_logging_lgpo():
-    ret = win_lgpo_netsh.get_settings(profile="domain", section="logging", store="lgpo")
-    assert "FileName" in ret
-    assert "LogAllowedConnections" in ret
-    assert "LogDroppedConnections" in ret
-    assert "MaxFileSize" in ret
-
-
-def test_get_settings_settings_local():
-    ret = win_lgpo_netsh.get_settings(
-        profile="domain", section="settings", store="local"
-    )
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+def test_get_settings_settings(store):
+    ret = win_lgpo_netsh.get_settings(profile="domain", section="settings", store=store)
     assert "InboundUserNotification" in ret
     assert "LocalConSecRules" in ret
     assert "LocalFirewallRules" in ret
     assert "UnicastResponseToMulticast" in ret
 
 
-def test_get_settings_settings_lgpo():
-    ret = win_lgpo_netsh.get_settings(
-        profile="domain", section="settings", store="lgpo"
-    )
-    assert "InboundUserNotification" in ret
-    assert "LocalConSecRules" in ret
-    assert "LocalFirewallRules" in ret
-    assert "UnicastResponseToMulticast" in ret
-
-
-def test_get_settings_state_local():
-    ret = win_lgpo_netsh.get_settings(profile="domain", section="state", store="local")
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+def test_get_settings_state(store):
+    ret = win_lgpo_netsh.get_settings(profile="domain", section="state", store=store)
     assert "State" in ret
 
 
-def test_get_settings_state_lgpo():
-    ret = win_lgpo_netsh.get_settings(profile="domain", section="state", store="lgpo")
-    assert "State" in ret
-
-
-def test_get_all_settings_local():
-    ret = win_lgpo_netsh.get_all_settings(profile="domain", store="local")
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+def test_get_all_settings(store):
+    ret = win_lgpo_netsh.get_all_settings(profile="domain", store=store)
     assert "Inbound" in ret
     assert "Outbound" in ret
     assert "FileName" in ret
@@ -88,470 +58,287 @@ def test_get_all_settings_local():
     assert "State" in ret
 
 
-def test_get_all_settings_lgpo():
-    ret = win_lgpo_netsh.get_all_settings(profile="domain", store="local")
-    assert "Inbound" in ret
-    assert "Outbound" in ret
-    assert "FileName" in ret
-    assert "LogAllowedConnections" in ret
-    assert "LogDroppedConnections" in ret
-    assert "MaxFileSize" in ret
-    assert "InboundUserNotification" in ret
-    assert "LocalConSecRules" in ret
-    assert "LocalFirewallRules" in ret
-    assert "UnicastResponseToMulticast" in ret
-    assert "State" in ret
-
-
-def test_get_all_profiles_local():
-    ret = win_lgpo_netsh.get_all_profiles(store="local")
-    assert "Domain Profile" in ret
-    assert "Private Profile" in ret
-    assert "Public Profile" in ret
-
-
-def test_get_all_profiles_lgpo():
-    ret = win_lgpo_netsh.get_all_profiles(store="lgpo")
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+def test_get_all_profiles(store):
+    ret = win_lgpo_netsh.get_all_profiles(store=store)
     assert "Domain Profile" in ret
     assert "Private Profile" in ret
     assert "Public Profile" in ret
 
 
 @pytest.mark.destructive_test
-def test_set_firewall_settings_inbound_local():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="firewallpolicy", store="local"
-    )["Inbound"]
-    try:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", inbound="allowinbound", store="local"
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="firewallpolicy", store="local"
-        )["Inbound"]
-        assert new == "AllowInbound"
-    finally:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", inbound=current, store="local"
-        )
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_settings_inbound_local_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="firewallpolicy", store="local"
-    )["Inbound"]
-    try:
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize(
+    "inbound", ["allowinbound", "blockinbound", "blockinboundalways", "notconfigured"]
+)
+def test_set_firewall_settings_inbound(store, inbound):
+    if inbound == "notconfigured" and store == "local":
         pytest.raises(
             CommandExecutionError,
             win_lgpo_netsh.set_firewall_settings,
             profile="domain",
-            inbound="notconfigured",
-            store="local",
+            inbound=inbound,
+            store=store,
         )
-    finally:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", inbound=current, store="local"
-        )
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_settings_inbound_lgpo_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="firewallpolicy", store="lgpo"
-    )["Inbound"]
-    try:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", inbound="notconfigured", store="lgpo"
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="firewallpolicy", store="lgpo"
+    else:
+        current = win_lgpo_netsh.get_settings(
+            profile="domain", section="firewallpolicy", store=store
         )["Inbound"]
-        assert new == "NotConfigured"
-    finally:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", inbound=current, store="lgpo"
-        )
-        assert ret is True
+        try:
+            ret = win_lgpo_netsh.set_firewall_settings(
+                profile="domain", inbound=inbound, store=store
+            )
+            assert ret is True
+            new = win_lgpo_netsh.get_settings(
+                profile="domain", section="firewallpolicy", store=store
+            )["Inbound"]
+            assert new.lower() == inbound
+        finally:
+            ret = win_lgpo_netsh.set_firewall_settings(
+                profile="domain", inbound=current, store=store
+            )
+            assert ret is True
 
 
 @pytest.mark.destructive_test
-def test_set_firewall_settings_outbound_local():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="firewallpolicy", store="local"
-    )["Outbound"]
-    try:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", outbound="allowoutbound", store="local"
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize(
+    "outbound", ["allowoutbound", "blockoutbound", "notconfigured"]
+)
+def test_set_firewall_settings_outbound(store, outbound):
+    if outbound == "notconfigured" and store == "local":
+        pytest.raises(
+            CommandExecutionError,
+            win_lgpo_netsh.set_firewall_settings,
+            profile="domain",
+            inbound=outbound,
+            store=store,
         )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="firewallpolicy", store="local"
+    else:
+        current = win_lgpo_netsh.get_settings(
+            profile="domain", section="firewallpolicy", store=store
         )["Outbound"]
-        assert new == "AllowOutbound"
-    finally:
-        ret = win_lgpo_netsh.set_firewall_settings(
-            profile="domain", outbound=current, store="local"
-        )
-        assert ret is True
+        try:
+            ret = win_lgpo_netsh.set_firewall_settings(
+                profile="domain", outbound=outbound, store=store
+            )
+            assert ret is True
+            new = win_lgpo_netsh.get_settings(
+                profile="domain", section="firewallpolicy", store=store
+            )["Outbound"]
+            assert new.lower() == outbound
+        finally:
+            ret = win_lgpo_netsh.set_firewall_settings(
+                profile="domain", outbound=current, store=store
+            )
+            assert ret is True
 
 
 @pytest.mark.destructive_test
-def test_set_firewall_logging_allowed_local_enable():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="local"
-    )["LogAllowedConnections"]
-    try:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="allowedconnections",
-            value="enable",
-            store="local",
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="logging", store="local"
-        )["LogAllowedConnections"]
-        assert new == "Enable"
-    finally:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="allowedconnections",
-            value=current,
-            store="local",
-        )
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_logging_allowed_local_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="local"
-    )["LogAllowedConnections"]
-    try:
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize("setting", ["allowedconnections", "droppedconnections"])
+@pytest.mark.parametrize("value", ["enable", "disable", "notconfigured"])
+def test_set_firewall_logging_connections(store, setting, value):
+    if value == "notconfigured" and store == "local":
         pytest.raises(
             CommandExecutionError,
             win_lgpo_netsh.set_logging_settings,
             profile="domain",
-            setting="allowedconnections",
-            value="notconfigured",
-            store="local",
+            setting=setting,
+            value=value,
+            store=store,
         )
-    finally:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="allowedconnections",
-            value=current,
-            store="local",
-        )
-        assert ret is True
+    else:
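+        # Map the netsh setting names to the keys returned by get_settings.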
+        setting_map = {
+            "allowedconnections": "LogAllowedConnections",
+            "droppedconnections": "LogDroppedConnections",
+        }
+        current = win_lgpo_netsh.get_settings(
+            profile="domain", section="logging", store=store
+        )[setting_map[setting]]
+        try:
+            ret = win_lgpo_netsh.set_logging_settings(
+                profile="domain",
+                setting=setting,
+                value=value,
+                store=store,
+            )
+            assert ret is True
+            new = win_lgpo_netsh.get_settings(
+                profile="domain", section="logging", store=store
+            )[setting_map[setting]]
+            assert new.lower() == value
+        finally:
+            ret = win_lgpo_netsh.set_logging_settings(
+                profile="domain",
+                setting=setting,
+                value=current,
+                store=store,
+            )
+            assert ret is True
 
 
 @pytest.mark.destructive_test
-def test_set_firewall_logging_allowed_lgpo_notconfigured():
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize("value", ["C:\\Temp\\test.log", "notconfigured"])
+def test_set_firewall_logging_filename(store, value):
     current = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="lgpo"
-    )["LogAllowedConnections"]
-    try:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="allowedconnections",
-            value="notconfigured",
-            store="lgpo",
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="logging", store="lgpo"
-        )["LogAllowedConnections"]
-        assert new == "NotConfigured"
-    finally:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="allowedconnections",
-            value=current,
-            store="lgpo",
-        )
-        assert ret is True
-
-
-def test_set_firewall_logging_dropped_local_enable():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="local"
-    )["LogDroppedConnections"]
-    try:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="droppedconnections",
-            value="enable",
-            store="local",
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="logging", store="local"
-        )["LogDroppedConnections"]
-        assert new == "Enable"
-    finally:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain",
-            setting="droppedconnections",
-            value=current,
-            store="local",
-        )
-        assert ret is True
-
-
-def test_set_firewall_logging_filename_local():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="local"
+        profile="domain", section="logging", store=store
     )["FileName"]
     try:
         ret = win_lgpo_netsh.set_logging_settings(
             profile="domain",
             setting="filename",
-            value="C:\\Temp\\test.log",
-            store="local",
+            value=value,
+            store=store,
         )
         assert ret is True
         new = win_lgpo_netsh.get_settings(
-            profile="domain", section="logging", store="local"
+            profile="domain", section="logging", store=store
         )["FileName"]
-        assert new == "C:\\Temp\\test.log"
+        assert new.lower() == value.lower()
     finally:
         ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain", setting="filename", value=current, store="local"
+            profile="domain", setting="filename", value=current, store=store
         )
         assert ret is True
 
 
-def test_set_firewall_logging_maxfilesize_local():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="logging", store="local"
-    )["MaxFileSize"]
-    try:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain", setting="maxfilesize", value="16384", store="local"
+@pytest.mark.destructive_test
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize("value", ["16384", "notconfigured"])
+def test_set_firewall_logging_maxfilesize(store, value):
+    if value == "notconfigured":
+        pytest.raises(
+            CommandExecutionError,
+            win_lgpo_netsh.set_logging_settings,
+            profile="domain",
+            setting="maxfilesize",
+            value=value,
+            store=store,
         )
+    else:
+        current = win_lgpo_netsh.get_settings(
+            profile="domain", section="logging", store=store
+        )["MaxFileSize"]
+        try:
+            ret = win_lgpo_netsh.set_logging_settings(
+                profile="domain", setting="maxfilesize", value=value, store=store
+            )
+            assert ret is True
+            new = win_lgpo_netsh.get_settings(
+                profile="domain", section="logging", store=store
+            )["MaxFileSize"]
+            assert new == int(value)
+        finally:
+            ret = win_lgpo_netsh.set_logging_settings(
+                profile="domain", setting="maxfilesize", value=current, store=store
+            )
+            assert ret is True
+
+
+@pytest.mark.destructive_test
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize(
+    "setting",
+    ["localconsecrules", "inboundusernotification", "unicastresponsetomulticast"],
+)
+@pytest.mark.parametrize("value", ["enable", "disable", "notconfigured"])
+def test_set_firewall_settings(store, setting, value):
+    setting_map = {
+        "localconsecrules": "LocalConSecRules",
+        "inboundusernotification": "InboundUserNotification",
+        "unicastresponsetomulticast": "UnicastResponseToMulticast",
+    }
+    if value == "notconfigured" and store == "local":
+        pytest.raises(
+            CommandExecutionError,
+            win_lgpo_netsh.set_settings,
+            profile="domain",
+            setting=setting,
+            value=value,
+            store=store,
+        )
+    else:
+        current = win_lgpo_netsh.get_settings(
+            profile="domain", section="settings", store=store
+        )[setting_map[setting]]
+        try:
+            ret = win_lgpo_netsh.set_settings(
+                profile="domain",
+                setting=setting,
+                value=value,
+                store=store,
+            )
+            assert ret is True
+            new = win_lgpo_netsh.get_settings(
+                profile="domain", section="settings", store=store
+            )[setting_map[setting]]
+            assert new.lower() == value
+        finally:
+            if current != "notconfigured":
+                ret = win_lgpo_netsh.set_settings(
+                    profile="domain",
+                    setting=setting,
+                    value=current,
+                    store=store,
+                )
+            assert ret is True
+
+
+@pytest.mark.destructive_test
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize("state", ["on", "off", "notconfigured"])
+def test_set_firewall_state(store, state):
+    current_state = win_lgpo_netsh.get_settings(
+        profile="domain", section="state", store=store
+    )["State"]
+    try:
+        ret = win_lgpo_netsh.set_state(profile="domain", state=state, store=store)
         assert ret is True
         new = win_lgpo_netsh.get_settings(
-            profile="domain", section="logging", store="local"
-        )["MaxFileSize"]
-        assert new == 16384
+            profile="domain", section="state", store=store
+        )["State"]
+        assert new.lower() == state.lower()
     finally:
-        ret = win_lgpo_netsh.set_logging_settings(
-            profile="domain", setting="maxfilesize", value=current, store="local"
-        )
-        assert ret is True
+        win_lgpo_netsh.set_state(profile="domain", state=current_state, store=store)
 
 
 @pytest.mark.destructive_test
-def test_set_firewall_settings_fwrules_local_enable():
-    pytest.raises(
-        CommandExecutionError,
-        win_lgpo_netsh.set_settings,
-        profile="domain",
-        setting="localfirewallrules",
-        value="enable",
-        store="local",
-    )
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_settings_fwrules_lgpo_notconfigured():
-    current = win_lgpo_netsh.get_settings(
+@pytest.mark.parametrize("store", ["local", "lgpo"])
+@pytest.mark.parametrize("allow_inbound", ["enable", "disable"])
+@pytest.mark.parametrize("state", ["on", "off", "notconfigured"])
+def test_set_firewall_state_allow_inbound(store, allow_inbound, state):
+    current_state = win_lgpo_netsh.get_settings(
+        profile="domain", section="state", store=store
+    )["State"]
+    current_local_fw_rules = win_lgpo_netsh.get_settings(
         profile="domain", section="settings", store="lgpo"
     )["LocalFirewallRules"]
     try:
         ret = win_lgpo_netsh.set_settings(
             profile="domain",
             setting="localfirewallrules",
-            value="notconfigured",
-            store="lgpo",
+            value=allow_inbound,
+            store=store,
         )
         assert ret is True
         new = win_lgpo_netsh.get_settings(
-            profile="domain", section="settings", store="lgpo"
+            profile="domain", section="settings", store=store
         )["LocalFirewallRules"]
-        assert new == "NotConfigured"
-    finally:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="localfirewallrules",
-            value=current,
-            store="lgpo",
-        )
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_settings_consecrules_local_enable():
-    pytest.raises(
-        CommandExecutionError,
-        win_lgpo_netsh.set_settings,
-        profile="domain",
-        setting="localconsecrules",
-        value="enable",
-        store="local",
-    )
-
-
-def test_set_firewall_settings_notification_local_enable():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="settings", store="local"
-    )["InboundUserNotification"]
-    try:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="inboundusernotification",
-            value="enable",
-            store="local",
-        )
+        assert new.lower() == allow_inbound.lower()
+        ret = win_lgpo_netsh.set_state(profile="domain", state=state, store=store)
         assert ret is True
         new = win_lgpo_netsh.get_settings(
-            profile="domain", section="settings", store="local"
-        )["InboundUserNotification"]
-        assert new == "Enable"
-    finally:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="inboundusernotification",
-            value=current,
-            store="local",
-        )
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_settings_notification_local_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="settings", store="local"
-    )["InboundUserNotification"]
-    try:
-        pytest.raises(
-            CommandExecutionError,
-            win_lgpo_netsh.set_settings,
-            profile="domain",
-            setting="inboundusernotification",
-            value="notconfigured",
-            store="local",
-        )
-    finally:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="inboundusernotification",
-            value=current,
-            store="local",
-        )
-        assert ret is True
-
-
-def test_set_firewall_settings_notification_lgpo_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="settings", store="lgpo"
-    )["InboundUserNotification"]
-    try:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="inboundusernotification",
-            value="notconfigured",
-            store="lgpo",
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="settings", store="lgpo"
-        )["InboundUserNotification"]
-        assert new == "NotConfigured"
-    finally:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="inboundusernotification",
-            value=current,
-            store="lgpo",
-        )
-        assert ret is True
-
-
-def test_set_firewall_settings_unicast_local_disable():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="settings", store="local"
-    )["UnicastResponseToMulticast"]
-    try:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="unicastresponsetomulticast",
-            value="disable",
-            store="local",
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="settings", store="local"
-        )["UnicastResponseToMulticast"]
-        assert new == "Disable"
-    finally:
-        ret = win_lgpo_netsh.set_settings(
-            profile="domain",
-            setting="unicastresponsetomulticast",
-            value=current,
-            store="local",
-        )
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_state_local_on():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="state", store="local"
-    )["State"]
-    try:
-        ret = win_lgpo_netsh.set_state(profile="domain", state="off", store="local")
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="state", store="local"
+            profile="domain", section="state", store=store
         )["State"]
-        assert new == "OFF"
+        assert new.lower() == state.lower()
     finally:
-        ret = win_lgpo_netsh.set_state(profile="domain", state=current, store="local")
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_state_local_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="state", store="local"
-    )["State"]
-    try:
-        ret = win_lgpo_netsh.set_state(
-            profile="domain",
-            state="notconfigured",
-            store="local",
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="state", store="local"
-        )["State"]
-        assert new == "NotConfigured"
-    finally:
-        ret = win_lgpo_netsh.set_state(profile="domain", state=current, store="local")
-        assert ret is True
-
-
-@pytest.mark.destructive_test
-def test_set_firewall_state_lgpo_notconfigured():
-    current = win_lgpo_netsh.get_settings(
-        profile="domain", section="state", store="local"
-    )["State"]
-    try:
-        ret = win_lgpo_netsh.set_state(
-            profile="domain", state="notconfigured", store="lgpo"
-        )
-        assert ret is True
-        new = win_lgpo_netsh.get_settings(
-            profile="domain", section="state", store="lgpo"
-        )["State"]
-        assert new == "NotConfigured"
-    finally:
-        ret = win_lgpo_netsh.set_state(profile="domain", state=current, store="lgpo")
-        assert ret is True
+        if current_local_fw_rules.lower() != "notconfigured":
+            win_lgpo_netsh.set_settings(
+                profile="domain",
+                setting="localfirewallrules",
+                value=current_local_fw_rules,
+                store=store,
+            )
+        win_lgpo_netsh.set_state(profile="domain", state=current_state, store=store)
diff --git a/tests/support/pkg.py b/tests/support/pkg.py
index 5d6afa5827c..6c67f948540 100644
--- a/tests/support/pkg.py
+++ b/tests/support/pkg.py
@@ -220,6 +220,7 @@ class SaltPkgInstall:
             version = self.prev_version
             parsed = packaging.version.parse(version)
             version = f"{parsed.major}.{parsed.minor}"
+        # ensure services are stopped on Debian/Ubuntu (mimics RedHat installs, where services do not start)
         if self.distro_id in ("ubuntu", "debian"):
             self.stop_services()
         return version
@@ -488,9 +489,11 @@ class SaltPkgInstall:
             log.debug("Installing: %s", str(pkg))
             ret = self.proc.run("installer", "-pkg", str(pkg), "-target", "/")
             self._check_retcode(ret)
+
             # Stop the service installed by the installer
             self.proc.run("launchctl", "disable", f"system/{service_name}")
             self.proc.run("launchctl", "bootout", "system", str(plist_file))
+
         elif upgrade:
             env = os.environ.copy()
             extra_args = []
@@ -588,17 +591,16 @@ class SaltPkgInstall:
 
     def stop_services(self):
         """
-        Debian distros automatically start the services
-        We want to ensure our tests start with the config
-        settings we have set. This will also verify the expected
-        services are up and running.
+        Debian/Ubuntu distros automatically start the services on install.
+        We want to ensure our tests start with the config settings we have set.
+        This will also verify the expected services are up and running.
         """
         retval = True
         for service in ["salt-syndic", "salt-master", "salt-minion"]:
             check_run = self.proc.run("systemctl", "status", service)
             if check_run.returncode != 0:
-                # The system was not started automatically and we
-                # are expecting it to be on install
+                # The service was not started automatically, but we expect
+                # it to be started on install on Debian/Ubuntu systems
                 log.debug("The service %s was not started on install.", service)
                 retval = False
             else:
@@ -606,10 +608,26 @@ class SaltPkgInstall:
                 self._check_retcode(stop_service)
         return retval
 
+    def restart_services(self):
+        """
+        Debian/Ubuntu distros automatically start the services on install.
+        We want to ensure our tests start with the config settings we have
+        set; for example, after install the services are stopped (similar to
+        RedHat not starting services on install), so restart them here.
+        This will also verify the expected services are up and running.
+        """
+        for service in ["salt-minion", "salt-master", "salt-syndic"]:
+            check_run = self.proc.run("systemctl", "status", service)
+            log.debug(
+                "The restart_services status, before restart, for service %s is %s.",
+                service,
+                check_run,
+            )
+            restart_service = self.proc.run("systemctl", "restart", service)
+            self._check_retcode(restart_service)
+
     def install_previous(self, downgrade=False):
         """
-        Install previous version. This is used for
-        upgrade tests.
+        Install previous version. This is used for upgrade tests.
         """
         major_ver = packaging.version.parse(self.prev_version).major
         relenv = packaging.version.parse(self.prev_version) >= packaging.version.parse(
@@ -618,9 +636,7 @@ class SaltPkgInstall:
         distro_name = self.distro_name
         if distro_name in ("almalinux", "rocky", "centos", "fedora"):
             distro_name = "redhat"
-        root_url = "salt/py3/"
-        if self.classic:
-            root_url = "py3/"
+        root_url = "https://packages.broadcom.com/artifactory"
 
         if self.distro_name in [
             "almalinux",
@@ -635,11 +651,6 @@ class SaltPkgInstall:
             # Removing EPEL repo files
             for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"):
                 fp.unlink()
-            gpg_key = "SALTSTACK-GPG-KEY.pub"
-            if self.distro_version == "9":
-                gpg_key = "SALTSTACK-GPG-KEY2.pub"
-            if relenv:
-                gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.pub"
 
             if platform.is_aarch64():
                 arch = "arm64"
@@ -653,11 +664,11 @@ class SaltPkgInstall:
             ret = self.proc.run(
                 "rpm",
                 "--import",
-                f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}/{gpg_key}",
+                "https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public",
             )
             self._check_retcode(ret)
             download_file(
-                f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}.repo",
+                "https://github.com/saltstack/salt-install-guide/releases/latest/download/salt.repo",
                 f"/etc/yum.repos.d/salt-{distro_name}.repo",
             )
             if self.distro_name == "photon":
@@ -704,40 +715,36 @@ class SaltPkgInstall:
                 arch = "arm64"
             else:
                 arch = "amd64"
-            pathlib.Path("/etc/apt/keyrings").mkdir(parents=True, exist_ok=True)
-            gpg_dest = "salt-archive-keyring.gpg"
-            gpg_key = gpg_dest
-            if relenv:
-                gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.gpg"
 
+            pathlib.Path("/etc/apt/keyrings").mkdir(parents=True, exist_ok=True)
+            gpg_full_path = "/etc/apt/keyrings/salt-archive-keyring.gpg"
+
+            # download the gpg pub key
             download_file(
-                f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}/{gpg_key}",
-                f"/etc/apt/keyrings/{gpg_dest}",
+                f"{root_url}/api/security/keypair/SaltProjectKey/public",
+                f"{gpg_full_path}",
             )
             with salt.utils.files.fopen(
                 pathlib.Path("/etc", "apt", "sources.list.d", "salt.list"), "w"
             ) as fp:
                 fp.write(
-                    f"deb [signed-by=/etc/apt/keyrings/{gpg_dest} arch={arch}] "
-                    f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver} {self.distro_codename} main"
+                    f"deb [signed-by={gpg_full_path} arch={arch}] "
+                    f"{root_url}/saltproject-deb/ {self.distro_codename} main"
                 )
             self._check_retcode(ret)
 
-            cmd = [
-                self.pkg_mngr,
-                "install",
-                *self.salt_pkgs,
-                "-y",
-            ]
+            cmd = [self.pkg_mngr, "install", *self.salt_pkgs, "-y"]
 
             if downgrade:
                 pref_file = pathlib.Path("/etc", "apt", "preferences.d", "salt.pref")
                 pref_file.parent.mkdir(exist_ok=True)
+                # TODO: This should probably pin the specific version to
+                # TODO: downgrade to, unless that is already handled via self.salt_pkgs
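+                # Pin salt* packages to the Broadcom repo with a priority above
+                # 1000 so apt will allow downgrading to the previous version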
                 pref_file.write_text(
                     textwrap.dedent(
-                        """\
+                        f"""\
                 Package: salt*
-                Pin: origin "repo.saltproject.io"
+                Pin: origin "{root_url}/saltproject-deb"
                 Pin-Priority: 1001
                 """
                     ),
@@ -752,7 +759,7 @@ class SaltPkgInstall:
                 "-o",
                 "DPkg::Options::=--force-confold",
             ]
-            ret = self.proc.run(self.pkg_mngr, "update", *extra_args, env=env)
+            self.proc.run(self.pkg_mngr, "update", *extra_args, env=env)
 
             cmd.extend(extra_args)
 
@@ -772,31 +779,20 @@ class SaltPkgInstall:
         elif platform.is_windows():
             self.bin_dir = self.install_dir / "bin"
             self.run_root = self.bin_dir / "salt.exe"
-            self.ssm_bin = self.bin_dir / "ssm.exe"
-            if self.file_ext == "msi" or relenv:
-                self.ssm_bin = self.install_dir / "ssm.exe"
+            self.ssm_bin = self.install_dir / "ssm.exe"
 
-            if not self.classic:
-                if not relenv:
-                    win_pkg = (
-                        f"salt-{self.prev_version}-1-windows-amd64.{self.file_ext}"
-                    )
-                else:
-                    if self.file_ext == "msi":
-                        win_pkg = (
-                            f"Salt-Minion-{self.prev_version}-Py3-AMD64.{self.file_ext}"
-                        )
-                    elif self.file_ext == "exe":
-                        win_pkg = f"Salt-Minion-{self.prev_version}-Py3-AMD64-Setup.{self.file_ext}"
-                win_pkg_url = f"https://repo.saltproject.io/salt/py3/windows/{major_ver}/{win_pkg}"
+            if self.file_ext == "exe":
+                win_pkg = (
+                    f"Salt-Minion-{self.prev_version}-Py3-AMD64-Setup.{self.file_ext}"
+                )
+            elif self.file_ext == "msi":
+                win_pkg = f"Salt-Minion-{self.prev_version}-Py3-AMD64.{self.file_ext}"
             else:
-                if self.file_ext == "msi":
-                    win_pkg = (
-                        f"Salt-Minion-{self.prev_version}-Py3-AMD64.{self.file_ext}"
-                    )
-                elif self.file_ext == "exe":
-                    win_pkg = f"Salt-Minion-{self.prev_version}-Py3-AMD64-Setup.{self.file_ext}"
-                win_pkg_url = f"https://repo.saltproject.io/windows/{win_pkg}"
+                log.debug("Unknown windows file extension: %s", self.file_ext)
+
+            win_pkg_url = (
+                f"{root_url}/saltproject-generic/windows/{self.prev_version}/{win_pkg}"
+            )
             pkg_path = pathlib.Path(r"C:\TEMP", win_pkg)
             pkg_path.parent.mkdir(exist_ok=True)
             download_file(win_pkg_url, pkg_path)
@@ -829,17 +825,17 @@ class SaltPkgInstall:
                 self._install_ssm_service()
 
         elif platform.is_darwin():
-            if self.classic:
-                mac_pkg = f"salt-{self.prev_version}-py3-x86_64.pkg"
-                mac_pkg_url = f"https://repo.saltproject.io/osx/{mac_pkg}"
+            if platform.is_aarch64() and (relenv or self.classic):
+                arch = "arm64"
             else:
-                if not relenv:
-                    mac_pkg = f"salt-{self.prev_version}-1-macos-x86_64.pkg"
-                else:
-                    mac_pkg = f"salt-{self.prev_version}-py3-x86_64.pkg"
-                mac_pkg_url = (
-                    f"https://repo.saltproject.io/salt/py3/macos/{major_ver}/{mac_pkg}"
-                )
+                arch = "x86_64"
+
+            mac_pkg = f"salt-{self.prev_version}-py3-{arch}.pkg"
+            mac_pkg_url = (
+                f"{root_url}/saltproject-generic/macos/{self.prev_version}/{mac_pkg}"
+            )
 
             mac_pkg_path = f"/tmp/{mac_pkg}"
             if not os.path.exists(mac_pkg_path):
@@ -1011,12 +1007,12 @@ class SaltPkgInstall:
     def __enter__(self):
         if platform.is_windows():
             self.update_process_path()
-
-        if not self.no_install:
-            if self.upgrade:
-                self.install_previous()
-            else:
-                self.install()
+        if self.no_install:
+            return self
+        if self.upgrade:
+            self.install_previous()
+        else:
+            self.install()
         return self
 
     def __exit__(self, *_):
diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py
index 5ca17913b6b..e7f38aafaa3 100644
--- a/tests/support/pytest/helpers.py
+++ b/tests/support/pytest/helpers.py
@@ -516,6 +516,8 @@ class FakeSaltExtension:
             setup_requires =
               wheel
               setuptools>=50.3.2
+            install_requires =
+              distro
 
             [options.packages.find]
             where = src
diff --git a/tests/support/virt.py b/tests/support/virt.py
index c651b01adcc..1802c3c3966 100644
--- a/tests/support/virt.py
+++ b/tests/support/virt.py
@@ -110,7 +110,28 @@ class SaltVirtMinionContainerFactory(SaltMinion):
             "-m",
             "pip",
             "install",
-            f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt",
+            "-r",
+            f"/salt/requirements/static/pkg/py{requirements_py_version}/linux.txt",
+        )
+        log.debug("Install Salt Dependencies in the container: %s", ret.stderr)
+        assert ret.returncode == 0
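+        # Install the pkg requirements again, this time constrained to the
+        # same pinned versions, before installing Salt itself below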
+        ret = self.run(
+            self.python_executable,
+            "-m",
+            "pip",
+            "install",
+            f"--constraint=/salt/requirements/static/pkg/py{requirements_py_version}/linux.txt",
+            "-r",
+            f"/salt/requirements/static/pkg/py{requirements_py_version}/linux.txt",
+        )
+        log.debug("Install Salt Dependencies in the container: %s", ret.stderr)
+        assert ret.returncode == 0
+        ret = self.run(
+            self.python_executable,
+            "-m",
+            "pip",
+            "install",
+            f"--constraint=/salt/requirements/static/pkg/py{requirements_py_version}/linux.txt",
             "/salt",
         )
         log.debug("Install Salt in the container: %s", ret)
diff --git a/tests/support/win_installer.py b/tests/support/win_installer.py
index b41586a6806..846245194b0 100644
--- a/tests/support/win_installer.py
+++ b/tests/support/win_installer.py
@@ -10,18 +10,46 @@
 """
 
 import hashlib
+from html.parser import HTMLParser
 
 import requests
 
 PREFIX = "Salt-Minion-"
-REPO = "https://repo.saltproject.io/windows"
+REPO = "https://packages.broadcom.com/artifactory/saltproject-generic/windows/"
 
 
 def latest_installer_name(arch="AMD64", **kwargs):
     """
     Create an installer file name
     """
-    return f"Salt-Minion-Latest-Py3-{arch}-Setup.exe"
+
+    # This is where windows packages are found
+    # Each version is in its own directory, so we need to list the directories
+    # and use the last one as the latest
+    html_response = requests.get(REPO, timeout=60)
+
+    versions = []
+
+    # Create a class so we can define how to handle the starttag
+    # We're looking for a "href" in the "a" tag which is the version
+    class MyHTMLParser(HTMLParser):
+
+        def handle_starttag(self, tag, attrs):
+            # Only parse the 'anchor' tag.
+            if tag == "a":
+                # Check the list of defined attributes.
+                for name, value in attrs:
+                    # If href is defined, add the value to the list of versions
+                    if name == "href":
+                        versions.append(value.strip("/"))
+
+    parser = MyHTMLParser()
+    parser.feed(html_response.text)
+    parser.close()
+
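+    # Assumes the listing is ordered oldest-to-newest, so the last href is
+    # the most recent version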
+    latest = versions[-1]
+
+    return f"{PREFIX}{latest}-Py3-{arch}-Setup.exe"
 
 
 def download_and_verify(fp, name, repo=REPO):
diff --git a/tests/unit/modules/test_localemod.py b/tests/unit/modules/test_localemod.py
index cc2706b20be..e7c147ed956 100644
--- a/tests/unit/modules/test_localemod.py
+++ b/tests/unit/modules/test_localemod.py
@@ -2,6 +2,8 @@
     :codeauthor: Rupesh Tare <rupesht@saltstack.com>
 """
 
+import subprocess
+
 import pytest
 
 import salt.modules.localemod as localemod
@@ -11,6 +13,15 @@ from tests.support.mock import MagicMock, Mock, patch
 from tests.support.unit import TestCase
 
 
+def _check_localectl():
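+    # Memoize the result on the function object so the localectl probe only
+    # runs once per test session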
+    if not hasattr(_check_localectl, "memo"):
+        proc = subprocess.run(["localectl"], check=False, capture_output=True)
+        _check_localectl.memo = (
+            b"Failed to connect to bus: No such file or directory" in proc.stderr
+        )
+    return _check_localectl.memo
+
+
 class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
     """
     Test cases for salt.modules.localemod
@@ -55,6 +66,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
         ):
             assert localemod.list_avail() == ["A", "B"]
 
+    @pytest.mark.skipif(_check_localectl(), reason="localectl is in degraded state")
     @patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/localctl"))
     @patch(
         "salt.modules.localemod.__salt__",
@@ -87,6 +99,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
         assert "data" in out["x11_model"]
         assert out["x11_model"]["data"] == "pc105"
 
+    @pytest.mark.skipif(_check_localectl(), reason="localectl is in degraded state")
     @patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/localctl"))
     @patch(
         "salt.modules.localemod.__salt__",
@@ -165,6 +178,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
         assert 'Unable to find "localectl"' in str(exc_info.value)
         assert not localemod.log.debug.called
 
+    @pytest.mark.skipif(_check_localectl(), reason="localectl is in degraded state")
     @patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/localctl"))
     @patch(
         "salt.modules.localemod.__salt__",
@@ -175,6 +189,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
             localemod._localectl_status()
         assert 'Unable to parse result of "localectl"' in str(exc_info.value)
 
+    @pytest.mark.skipif(_check_localectl(), reason="localectl is in degraded state")
     @patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/localctl"))
     @patch(
         "salt.modules.localemod.__salt__",
@@ -185,6 +200,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
             localemod._localectl_status()
         assert 'Unable to parse result of "localectl"' in str(exc_info.value)
 
+    @pytest.mark.skipif(_check_localectl(), reason="localectl is in degraded state")
     @patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/localctl"))
     @patch(
         "salt.modules.localemod.__salt__",
@@ -831,6 +847,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
         ):
             assert localemod.gen_locale("en_US.UTF-8", verbose=True) == ret
 
+    @pytest.mark.skipif(_check_localectl(), reason="localectl is in degraded state")
     @patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/localctl"))
     def test_parse_localectl(self):
         localectl_out = (
diff --git a/tests/unit/test_module_names.py b/tests/unit/test_module_names.py
index 15d06e0ed66..147945f02b3 100644
--- a/tests/unit/test_module_names.py
+++ b/tests/unit/test_module_names.py
@@ -44,6 +44,7 @@ EXCLUDED_FILES = [
     os.path.join("tests", "wheeltest.py"),
     os.path.join("tests", "zypp_plugin.py"),
     os.path.join("tests", "pytests", "functional", "cache", "helpers.py"),
+    os.path.join("tests", "pytests", "functional", "transport", "base.py"),
     os.path.join("tests", "pytests", "unit", "states", "virt", "helpers.py"),
 ]
 
diff --git a/tests/unit/utils/test_systemd.py b/tests/unit/utils/test_systemd.py
index 20e1aaeda0c..a68aa186d6a 100644
--- a/tests/unit/utils/test_systemd.py
+++ b/tests/unit/utils/test_systemd.py
@@ -173,18 +173,20 @@ class SystemdTestCase(TestCase):
             # return data, so it is sufficient enough to mock it as True for
             # these tests.
             with patch("os.stat", side_effect=_booted_effect):
-                # Test without context dict passed
-                self.assertEqual(_systemd.has_scope(), _expected)
-                # Test that context key is set when context dict is passed
-                context = {}
-                self.assertEqual(_systemd.has_scope(context), _expected)
-                self.assertEqual(
-                    context,
-                    {
-                        "salt.utils.systemd.booted": True,
-                        "salt.utils.systemd.version": _version,
-                    },
-                )
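+                # has_scope() also consults salt.utils.systemd.status, so mock
+                # it and seed the expected context with its cached value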
+                with patch("salt.utils.systemd.status", return_value=True):
+                    # Test without context dict passed
+                    self.assertEqual(_systemd.has_scope(), _expected)
+                    context = {"salt.utils.systemd.status": True}
+                    # Test that context key is set when context dict is passed
+                    self.assertEqual(_systemd.has_scope(context), _expected)
+                    self.assertEqual(
+                        context,
+                        {
+                            "salt.utils.systemd.booted": True,
+                            "salt.utils.systemd.status": True,
+                            "salt.utils.systemd.version": _version,
+                        },
+                    )
 
     def test_has_scope_systemd205(self):
         """
@@ -207,18 +209,20 @@ class SystemdTestCase(TestCase):
             # return data, so it is sufficient enough to mock it as True for
             # these tests.
             with patch("os.stat", side_effect=_booted_effect):
-                # Test without context dict passed
-                self.assertEqual(_systemd.has_scope(), _expected)
-                # Test that context key is set when context dict is passed
-                context = {}
-                self.assertEqual(_systemd.has_scope(context), _expected)
-                self.assertEqual(
-                    context,
-                    {
-                        "salt.utils.systemd.booted": True,
-                        "salt.utils.systemd.version": _version,
-                    },
-                )
+                with patch("salt.utils.systemd.status", return_value=True):
+                    # Test without context dict passed
+                    self.assertEqual(_systemd.has_scope(), _expected)
+                    # Test that context key is set when context dict is passed
+                    context = {"salt.utils.systemd.status": True}
+                    self.assertEqual(_systemd.has_scope(context), _expected)
+                    self.assertEqual(
+                        context,
+                        {
+                            "salt.utils.systemd.booted": True,
+                            "salt.utils.systemd.version": _version,
+                            "salt.utils.systemd.status": True,
+                        },
+                    )
 
     def test_has_scope_systemd206(self):
         """
@@ -241,18 +245,20 @@ class SystemdTestCase(TestCase):
             # return data, so it is sufficient enough to mock it as True for
             # these tests.
             with patch("os.stat", side_effect=_booted_effect):
-                # Test without context dict passed
-                self.assertEqual(_systemd.has_scope(), _expected)
-                # Test that context key is set when context dict is passed
-                context = {}
-                self.assertEqual(_systemd.has_scope(context), _expected)
-                self.assertEqual(
-                    context,
-                    {
-                        "salt.utils.systemd.booted": True,
-                        "salt.utils.systemd.version": _version,
-                    },
-                )
+                with patch("salt.utils.systemd.status", return_value=True):
+                    # Test without context dict passed
+                    self.assertEqual(_systemd.has_scope(), _expected)
+                    # Test that context key is set when context dict is passed
+                    context = {"salt.utils.systemd.status": True}
+                    self.assertEqual(_systemd.has_scope(context), _expected)
+                    self.assertEqual(
+                        context,
+                        {
+                            "salt.utils.systemd.booted": True,
+                            "salt.utils.systemd.version": _version,
+                            "salt.utils.systemd.status": True,
+                        },
+                    )
 
     def test_has_scope_no_systemd(self):
         """
diff --git a/tests/unit/utils/test_yamldumper.py b/tests/unit/utils/test_yamldumper.py
deleted file mode 100644
index 9a1a6ab103b..00000000000
--- a/tests/unit/utils/test_yamldumper.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-    Unit tests for salt.utils.yamldumper
-"""
-
-import salt.utils.yamldumper
-from tests.support.unit import TestCase
-
-
-class YamlDumperTestCase(TestCase):
-    """
-    TestCase for salt.utils.yamldumper module
-    """
-
-    def test_yaml_dump(self):
-        """
-        Test yaml.dump a dict
-        """
-        data = {"foo": "bar"}
-        exp_yaml = "{foo: bar}\n"
-
-        assert salt.utils.yamldumper.dump(data) == exp_yaml
-
-        assert salt.utils.yamldumper.dump(
-            data, default_flow_style=False
-        ) == exp_yaml.replace("{", "").replace("}", "")
-
-    def test_yaml_safe_dump(self):
-        """
-        Test yaml.safe_dump a dict
-        """
-        data = {"foo": "bar"}
-        assert salt.utils.yamldumper.safe_dump(data) == "{foo: bar}\n"
-
-        assert (
-            salt.utils.yamldumper.safe_dump(data, default_flow_style=False)
-            == "foo: bar\n"
-        )
diff --git a/tools/ci.py b/tools/ci.py
index 1eaf77fe2a9..92ba64d39dd 100644
--- a/tools/ci.py
+++ b/tools/ci.py
@@ -9,35 +9,25 @@ import json
 import logging
 import os
 import pathlib
+import pprint
 import random
 import shutil
 import sys
 import time
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Literal
 
 import yaml
 from ptscripts import Context, command_group
 
 import tools.utils
 import tools.utils.gh
-from tools.precommit.workflows import TEST_SALT_LISTING
+from tools.precommit.workflows import TEST_SALT_LISTING, TEST_SALT_PKG_LISTING
 
 if sys.version_info < (3, 11):
     from typing_extensions import NotRequired, TypedDict
 else:
     from typing import NotRequired, TypedDict  # pylint: disable=no-name-in-module
 
-try:
-    import boto3
-except ImportError:
-    print(
-        "\nPlease run 'python -m pip install -r "
-        "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
-        file=sys.stderr,
-        flush=True,
-    )
-    raise
-
 log = logging.getLogger(__name__)
 
 # Define the command group
@@ -153,93 +143,6 @@ def process_changed_files(ctx: Context, event_name: str, changed_files: pathlib.
     ctx.exit(0)
 
 
-@ci.command(
-    name="runner-types",
-    arguments={
-        "event_name": {
-            "help": "The name of the GitHub event being processed.",
-        },
-    },
-)
-def runner_types(ctx: Context, event_name: str):
-    """
-    Set GH Actions 'runners' output to know what can run where.
-    """
-    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
-    if gh_event_path is None:
-        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event_path is not None
-
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is None:
-        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert github_output is not None
-
-    try:
-        gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
-    except Exception as exc:
-        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)  # type: ignore[arg-type]
-        ctx.exit(1)
-
-    ctx.info("GH Event Payload:")
-    ctx.print(gh_event, soft_wrap=True)
-    # Let's it print until the end
-    time.sleep(1)
-
-    ctx.info("Selecting which type of runners(self hosted runners or not) to run")
-    runners = {"github-hosted": False, "self-hosted": False}
-    if event_name == "pull_request":
-        ctx.info("Running from a pull request event")
-        pr_event_data = gh_event["pull_request"]
-        if (
-            pr_event_data["head"]["repo"]["full_name"]
-            == pr_event_data["base"]["repo"]["full_name"]
-        ):
-            # If this is a pull request coming from the same repository, don't run anything
-            ctx.info("Pull request is coming from the same repository.")
-            ctx.info("Not running any jobs since they will run against the branch")
-            ctx.info("Writing 'runners' to the github outputs file:\n", runners)
-            with open(github_output, "a", encoding="utf-8") as wfh:
-                wfh.write(f"runners={json.dumps(runners)}\n")
-            ctx.exit(0)
-
-        # This is a PR from a forked repository
-        ctx.info("Pull request is not comming from the same repository")
-        runners["github-hosted"] = runners["self-hosted"] = True
-        ctx.info("Writing 'runners' to the github outputs file:\n", runners)
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"runners={json.dumps(runners)}\n")
-        ctx.exit(0)
-
-    # This is a push or a scheduled event
-    ctx.info(f"Running from a {event_name!r} event")
-    if (
-        gh_event["repository"]["fork"] is True
-        and os.environ.get("FORK_HAS_SELF_HOSTED_RUNNERS", "0") == "1"
-    ):
-        # This is running on a forked repository, don't run tests
-        ctx.info("The push event is on a forked repository")
-        runners["github-hosted"] = True
-        ctx.info("Writing 'runners' to the github outputs file:\n", runners)
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"runners={json.dumps(runners)}\n")
-        ctx.exit(0)
-
-    # Not running on a fork, or the fork has self hosted runners, run everything
-    ctx.info(f"The {event_name!r} event is from the main repository")
-    runners["github-hosted"] = runners["self-hosted"] = True
-    ctx.info("Writing 'runners' to the github outputs file:\n", runners)
-    with open(github_output, "a", encoding="utf-8") as wfh:
-        wfh.write(f"runners={json.dumps(runners)}")
-    ctx.exit(0)
-
-
 @ci.command(
     name="define-jobs",
     arguments={
@@ -394,6 +297,9 @@ def define_jobs(
         changed_files_contents["workflows"],
         changed_files_contents["golden_images"],
     }
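+    # macOS-targeted test labels require the macOS onedir build jobs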
+    if "test:os:all" in labels or any([_.startswith("test:os:macos") for _ in labels]):
+        jobs["build-deps-onedir-macos"] = True
+        jobs["build-salt-onedir-macos"] = True
     if jobs["test-pkg"] and required_pkg_test_changes == {"false"}:
         if "test:pkg" in labels:
             with open(github_step_summary, "a", encoding="utf-8") as wfh:
@@ -443,667 +349,21 @@ class TestRun(TypedDict):
     selected_tests: NotRequired[dict[str, bool]]
 
 
-@ci.command(
-    name="define-testrun",
-    arguments={
-        "event_name": {
-            "help": "The name of the GitHub event being processed.",
-        },
-        "changed_files": {
-            "help": (
-                "Path to '.json' file containing the payload of changed files "
-                "from the 'dorny/paths-filter' GitHub action."
-            ),
-        },
-    },
-)
-def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
+def _build_matrix(os_kind, linux_arm_runner):
     """
-    Set GH Actions outputs for what and how Salt should be tested.
+    Generate the matrix for the build CI/CD steps.
     """
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is None:
-        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert github_output is not None
-
-    github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY")
-    if github_step_summary is None:
-        ctx.warn("The 'GITHUB_STEP_SUMMARY' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert github_step_summary is not None
-
-    labels: list[str] = []
-    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
-    if gh_event_path is not None:
-        try:
-            gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
-        except Exception as exc:
-            ctx.error(
-                f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc  # type: ignore[arg-type]
-            )
-            ctx.exit(1)
-
-        labels.extend(
-            label[0] for label in _get_pr_test_labels_from_event_payload(gh_event)
-        )
-
-    if "test:coverage" in labels:
-        ctx.info("Writing 'testrun' to the github outputs file")
-        testrun = TestRun(type="full", skip_code_coverage=False)
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"testrun={json.dumps(testrun)}\n")
-        with open(github_step_summary, "a", encoding="utf-8") as wfh:
-            wfh.write(
-                "Full test run chosen because the label `test:coverage` is set.\n"
-            )
-        return
-    elif event_name != "pull_request":
-        # In this case, a full test run is in order
-        ctx.info("Writing 'testrun' to the github outputs file")
-        testrun = TestRun(type="full", skip_code_coverage=False)
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"testrun={json.dumps(testrun)}\n")
-
-        with open(github_step_summary, "a", encoding="utf-8") as wfh:
-            wfh.write(f"Full test run chosen due to event type of `{event_name}`.\n")
-        return
-
-    # So, it's a pull request...
-
-    if not changed_files.exists():
-        ctx.error(f"The '{changed_files}' file does not exist.")
-        ctx.error(
-            "FYI, the command 'tools process-changed-files <changed-files-path>' "
-            "needs to run prior to this one."
-        )
-        ctx.exit(1)
-    try:
-        changed_files_contents = json.loads(changed_files.read_text())
-    except Exception as exc:
-        ctx.error(f"Could not load the changed files from '{changed_files}': {exc}")
-        ctx.exit(1)
-
-    # Based on which files changed, or other things like PR labels we can
-    # decide what to run, or even if the full test run should be running on the
-    # pull request, etc...
-    changed_pkg_requirements_files = json.loads(
-        changed_files_contents["pkg_requirements_files"]
-    )
-    changed_test_requirements_files = json.loads(
-        changed_files_contents["test_requirements_files"]
-    )
-    if changed_files_contents["golden_images"] == "true":
-        with open(github_step_summary, "a", encoding="utf-8") as wfh:
-            wfh.write(
-                "Full test run chosen because there was a change made "
-                "to `cicd/golden-images.json`.\n"
-            )
-        testrun = TestRun(type="full", skip_code_coverage=True)
-    elif changed_pkg_requirements_files or changed_test_requirements_files:
-        with open(github_step_summary, "a", encoding="utf-8") as wfh:
-            wfh.write(
-                "Full test run chosen because there was a change made "
-                "to the requirements files.\n"
-            )
-            wfh.write(
-                "<details>\n<summary>Changed Requirements Files (click me)</summary>\n<pre>\n"
-            )
-            for path in sorted(
-                changed_pkg_requirements_files + changed_test_requirements_files
-            ):
-                wfh.write(f"{path}\n")
-            wfh.write("</pre>\n</details>\n")
-        testrun = TestRun(type="full", skip_code_coverage=True)
-    elif "test:full" in labels:
-        with open(github_step_summary, "a", encoding="utf-8") as wfh:
-            wfh.write("Full test run chosen because the label `test:full` is set.\n")
-        testrun = TestRun(type="full", skip_code_coverage=True)
-    else:
-        testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
-        testrun = TestRun(
-            type="changed",
-            skip_code_coverage=True,
-            from_filenames=str(
-                testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
-            ),
-        )
-        ctx.info(f"Writing {testrun_changed_files_path.name} ...")
-        selected_changed_files = []
-        for fpath in json.loads(changed_files_contents["testrun_files"]):
-            if fpath.startswith(("tools/", "tasks/")):
-                continue
-            if fpath in ("noxfile.py",):
-                continue
-            if fpath == "tests/conftest.py":
-                # In this particular case, just run the full test suite
-                testrun["type"] = "full"
-                with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                    wfh.write(
-                        f"Full test run chosen because there was a change to `{fpath}`.\n"
-                    )
-            selected_changed_files.append(fpath)
-        testrun_changed_files_path.write_text("\n".join(sorted(selected_changed_files)))
-        if testrun["type"] == "changed":
-            with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                wfh.write("Partial test run chosen.\n")
-            testrun["selected_tests"] = {
-                "core": False,
-                "slow": False,
-                "fast": True,
-                "flaky": False,
-            }
-            if "test:slow" in labels:
-                with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                    wfh.write("Slow tests chosen by `test:slow` label.\n")
-                testrun["selected_tests"]["slow"] = True
-            if "test:core" in labels:
-                with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                    wfh.write("Core tests chosen by `test:core` label.\n")
-                testrun["selected_tests"]["core"] = True
-            if "test:no-fast" in labels:
-                with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                    wfh.write("Fast tests deselected by `test:no-fast` label.\n")
-                testrun["selected_tests"]["fast"] = False
-            if "test:flaky-jail" in labels:
-                with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                    wfh.write("Flaky jailed tests chosen by `test:flaky-jail` label.\n")
-                testrun["selected_tests"]["flaky"] = True
-        if selected_changed_files:
-            with open(github_step_summary, "a", encoding="utf-8") as wfh:
-                wfh.write(
-                    "<details>\n<summary>Selected Changed Files (click me)</summary>\n<pre>\n"
-                )
-                for path in sorted(selected_changed_files):
-                    wfh.write(f"{path}\n")
-                wfh.write("</pre>\n</details>\n")
-
-    with open(github_step_summary, "a", encoding="utf-8") as wfh:
-        wfh.write("<details>\n<summary>All Changed Files (click me)</summary>\n<pre>\n")
-        for path in sorted(json.loads(changed_files_contents["repo_files"])):
-            wfh.write(f"{path}\n")
-        wfh.write("</pre>\n</details>\n")
-
-    ctx.info("Writing 'testrun' to the github outputs file:\n", testrun)
-    with open(github_output, "a", encoding="utf-8") as wfh:
-        wfh.write(f"testrun={json.dumps(testrun)}\n")
-
-
-@ci.command(
-    arguments={
-        "distro_slug": {
-            "help": "The distribution slug to generate the matrix for",
-        },
-        "full": {
-            "help": "Full test run",
-        },
-        "workflow": {
-            "help": "Which workflow is running",
-        },
-    },
-)
-def matrix(
-    ctx: Context,
-    distro_slug: str,
-    full: bool = False,
-    workflow: str = "ci",
-):
-    """
-    Generate the test matrix.
-    """
-    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
-    if gh_event_path is None:
-        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event_path is not None
-
-    gh_event = None
-    try:
-        gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
-    except Exception as exc:
-        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event is not None
-
-    _matrix = []
-    _splits = {
-        "functional": 4,
-        "integration": 7,
-        "scenarios": 1,
-        "unit": 4,
-    }
-    for transport in ("zeromq", "tcp"):
-        if transport == "tcp":
-            if distro_slug not in (
-                "rockylinux-9",
-                "rockylinux-9-arm64",
-                "photonos-5",
-                "photonos-5-arm64",
-                "ubuntu-22.04",
-                "ubuntu-22.04-arm64",
-            ):
-                # Only run TCP transport tests on these distributions
-                continue
-        for chunk in ("unit", "functional", "integration", "scenarios"):
-            if transport == "tcp" and chunk in ("unit", "functional"):
-                # Only integration and scenarios shall be tested under TCP,
-                # the rest would be repeating tests
-                continue
-            if "macos" in distro_slug and chunk == "scenarios":
-                continue
-            splits = _splits.get(chunk) or 1
-            if full and splits > 1:
-                for split in range(1, splits + 1):
-                    _matrix.append(
-                        {
-                            "transport": transport,
-                            "tests-chunk": chunk,
-                            "test-group": split,
-                            "test-group-count": splits,
-                        }
-                    )
-            else:
-                _matrix.append({"transport": transport, "tests-chunk": chunk})
-
-    ctx.info("Generated matrix:")
-    if not _matrix:
-        ctx.print(" * `None`")
-    else:
-        for entry in _matrix:
-            ctx.print(" * ", entry, soft_wrap=True)
-
-    if (
-        gh_event["repository"]["fork"] is True
-        and "macos" in distro_slug
-        and "arm64" in distro_slug
-    ):
-        ctx.warn("Forks don't have access to MacOS 13 Arm64. Clearning the matrix.")
-        _matrix.clear()
-
-    if not _matrix:
-        build_reports = False
-        ctx.info("Not building reports because the matrix is empty")
-    else:
-        build_reports = True
-
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is not None:
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"matrix={json.dumps(_matrix)}\n")
-            wfh.write(f"build-reports={json.dumps(build_reports)}\n")
-    ctx.exit(0)
-
-
-@ci.command(
-    name="pkg-matrix",
-    arguments={
-        "distro_slug": {
-            "help": "The distribution slug to generate the matrix for",
-        },
-        "pkg_type": {
-            "help": "The type of package we are testing against",
-        },
-        "testing_releases": {
-            "help": "The salt releases to test upgrades against",
-            "nargs": "+",
-            "required": True,
-        },
-    },
-)
-def pkg_matrix(
-    ctx: Context,
-    distro_slug: str,
-    pkg_type: str,
-    testing_releases: list[tools.utils.Version] = None,
-):
-    """
-    Generate the test matrix.
-    """
-    gh_event = None
-    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
-    if gh_event_path is None:
-        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
-    else:
-        try:
-            gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
-        except Exception as exc:
-            ctx.error(
-                f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc
-            )
-
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is None:
-        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
-
-    if TYPE_CHECKING:
-        assert testing_releases
-
-    still_testing_3005 = False
-    for release_version in testing_releases:
-        if still_testing_3005:
-            break
-        if release_version < tools.utils.Version("3006.0"):
-            still_testing_3005 = True
-
-    if still_testing_3005 is False:
-        ctx.error(
-            f"No longer testing 3005.x releases please update {__file__} "
-            "and remove this error and the logic above the error. There may "
-            "be other places that need code removed as well."
-        )
-        ctx.exit(1)
-
-    adjusted_versions = []
-    for ver in testing_releases:
-        if ver < tools.utils.Version("3006.0"):
-            adjusted_versions.append((ver, "classic"))
-            adjusted_versions.append((ver, "tiamat"))
-        else:
-            adjusted_versions.append((ver, "relenv"))
-    ctx.info(f"Will look for the following versions: {adjusted_versions}")
-
-    # Filter out the prefixes to look under
-    if "macos-" in distro_slug:
-        # We don't have golden images for macos, handle these separately
-        prefixes = {
-            "classic": "osx/",
-            "tiamat": "salt/py3/macos/minor/",
-            "relenv": "salt/py3/macos/minor/",
-        }
-    else:
-        parts = distro_slug.split("-")
-        name = parts[0]
-        version = parts[1]
-
-        if len(parts) > 2:
-            arch = parts[2]
-        elif name in ("debian", "ubuntu"):
-            arch = "amd64"
-        else:
-            arch = "x86_64"
-
-        if name == "amazonlinux":
-            name = "amazon"
-        elif name == "rockylinux":
-            name = "redhat"
-        elif "photon" in name:
-            name = "photon"
-
-        if name == "windows":
-            prefixes = {
-                "classic": "windows/",
-                "tiamat": "salt/py3/windows/minor",
-                "relenv": "salt/py3/windows/minor",
-            }
-        else:
-            prefixes = {
-                "classic": f"py3/{name}/{version}/{arch}/",
-                "tiamat": f"salt/py3/{name}/{version}/{arch}/minor/",
-                "relenv": f"salt/py3/{name}/{version}/{arch}/minor/",
-            }
-
-    s3 = boto3.client("s3")
-    paginator = s3.get_paginator("list_objects_v2")
-    _matrix = [
-        {
-            "tests-chunk": "install",
-            "version": None,
-        }
-    ]
-
-    for version, backend in adjusted_versions:
-        if (
-            distro_slug.startswith(("macos-", "debian-", "ubuntu-"))
-            or version.major < 3006
-        ):
-            # XXX: Temporarily skip problematic tests
-            ctx.warn(
-                f"Temporary skip builds on {distro_slug} for version {version} with backend {backend}"
-            )
-            continue
-
-        prefix = prefixes[backend]
-        # TODO: Remove this after 3009.0
-        if backend == "relenv" and version >= tools.utils.Version("3006.5"):
-            prefix.replace("/arm64/", "/aarch64/")
-        # Using a paginator allows us to list recursively and avoid the item limit
-        page_iterator = paginator.paginate(
-            Bucket=f"salt-project-{tools.utils.SPB_ENVIRONMENT}-salt-artifacts-release",
-            Prefix=prefix,
-        )
-        # Uses a jmespath expression to test if the wanted version is in any of the filenames
-        key_filter = f"Contents[?contains(Key, '{version}')][]"
-        if pkg_type == "MSI":
-            # TODO: Add this back when we add MSI upgrade and downgrade tests
-            # key_filter = f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.msi')]"
-            continue
-        elif pkg_type == "NSIS":
-            # XXX: Temporarily skip problematic tests
-            # key_filter = (
-            #    f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.exe')]"
-            # )
-            continue
-        objects = list(page_iterator.search(key_filter))
-        # Testing using `any` because sometimes the paginator returns `[None]`
-        if any(objects):
-            ctx.info(
-                f"Found {version} ({backend}) for {distro_slug}: {objects[0]['Key']}"
-            )
-            for session in ("upgrade", "downgrade"):
-                if session == "downgrade" and distro_slug.startswith(
-                    ("rockylinux", "amazonlinux", "centos")
-                ):
-                    # XXX: Temporarily skip problematic tests
-                    ctx.warn(
-                        f"Temporary skip {session} builds on {distro_slug} for version {version} "
-                        f"with backend {backend}"
-                    )
-                    continue
-                if backend == "classic":
-                    session += "-classic"
-                _matrix.append(
-                    {
-                        "tests-chunk": session,
-                        "version": str(version),
-                    }
-                )
-        else:
-            ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}")
-
-    ctx.info("Generated matrix:")
-    if not _matrix:
-        ctx.print(" * `None`")
-    else:
-        for entry in _matrix:
-            ctx.print(" * ", entry, soft_wrap=True)
-
-    if (
-        gh_event is not None
-        and gh_event["repository"]["fork"] is True
-        and "macos" in distro_slug
-        and "arm64" in distro_slug
-    ):
-        ctx.warn("Forks don't have access to MacOS 13 Arm64. Clearning the matrix.")
-        _matrix.clear()
-
-    if not _matrix:
-        build_reports = False
-        ctx.info("Not building reports because the matrix is empty")
-    else:
-        build_reports = True
-
-    if github_output is not None:
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"matrix={json.dumps(_matrix)}\n")
-            wfh.write(f"build-reports={json.dumps(build_reports)}\n")
-    ctx.exit(0)
-
-
-@ci.command(name="deps-matrix")
-def get_ci_deps_matrix(ctx: Context):
-    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
-    if gh_event_path is None:
-        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event_path is not None
-
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is None:
-        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert github_output is not None
-
-    gh_event = None
-    try:
-        gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
-    except Exception as exc:
-        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event is not None
-
-    _matrix = {
-        "linux": [
-            {"distro-slug": "amazonlinux-2", "arch": "x86_64"},
-            {"distro-slug": "amazonlinux-2-arm64", "arch": "arm64"},
-        ],
-        "macos": [
-            {"distro-slug": "macos-12", "arch": "x86_64"},
-        ],
-        "windows": [
-            {"distro-slug": "windows-2022", "arch": "amd64"},
-        ],
-    }
-    if gh_event["repository"]["fork"] is not True:
-        _matrix["macos"].append(
-            {
-                "distro-slug": "macos-13-arm64",
-                "arch": "arm64",
-            }
-        )
-
-    ctx.info("Generated matrix:")
-    ctx.print(_matrix, soft_wrap=True)
-
-    if github_output is not None:
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"matrix={json.dumps(_matrix)}\n")
-    ctx.exit(0)
-
-
-@ci.command(name="pkg-downloads-matrix")
-def get_pkg_downloads_matrix(ctx: Context):
-    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
-    if gh_event_path is None:
-        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event_path is not None
-
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is None:
-        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert github_output is not None
-
-    gh_event = None
-    try:
-        gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
-    except Exception as exc:
-        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
-        ctx.exit(1)
-
-    if TYPE_CHECKING:
-        assert gh_event is not None
-
-    _matrix: dict[str, list[dict[str, str]]] = {
-        "linux": [],
-        "macos": [],
-        "windows": [],
-    }
-
-    rpm_slugs = (
-        "rockylinux",
-        "amazonlinux",
-        "fedora",
-        "photon",
-    )
-    linux_skip_pkg_download_tests = (
-        "archlinux-lts",
-        "opensuse-15",
-        "windows",
-    )
-    for slug in sorted(tools.utils.get_golden_images()):
-        if slug.startswith(linux_skip_pkg_download_tests):
-            continue
-        if "arm64" in slug:
-            arch = "arm64"
-        else:
-            arch = "x86_64"
-        if slug.startswith(rpm_slugs) and arch == "arm64":
-            # While we maintain backwards compatible urls
-            _matrix["linux"].append(
-                {"distro-slug": slug, "arch": "aarch64", "pkg-type": "package"}
-            )
-        _matrix["linux"].append(
-            {"distro-slug": slug, "arch": arch, "pkg-type": "package"}
-        )
-        if slug.startswith("ubuntu-22"):
-            _matrix["linux"].append(
-                {"distro-slug": slug, "arch": arch, "pkg-type": "onedir"}
-            )
-    for mac in TEST_SALT_LISTING["macos"]:
-        if gh_event["repository"]["fork"] is True and mac.arch == "arm64":
-            continue
-        _matrix["macos"].append(
-            {"distro-slug": mac.slug, "arch": mac.arch, "pkg-type": "package"}
-        )
-
-    if gh_event["repository"]["fork"] is True:
-        macos_idx = 0  # macos-12
-    else:
-        macos_idx = 1  # macos-13
-    _matrix["macos"].append(
-        {
-            "distro-slug": TEST_SALT_LISTING["macos"][macos_idx].slug,
-            "arch": TEST_SALT_LISTING["macos"][macos_idx].arch,
-            "pkg-type": "onedir",
-        }
-    )
-
-    for win in TEST_SALT_LISTING["windows"][-1:]:
-        for pkg_type in ("nsis", "msi", "onedir"):
-            _matrix["windows"].append(
-                {
-                    "distro-slug": win.slug,
-                    "arch": win.arch,
-                    "pkg-type": pkg_type,
-                }
-            )
-
-    ctx.info("Generated matrix:")
-    ctx.print(_matrix, soft_wrap=True)
-
-    if github_output is not None:
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(f"matrix={json.dumps(_matrix)}\n")
-    ctx.exit(0)
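+    # A minimal sketch of the expected shape: with an arm64 runner label such
+    # as "ubuntu-24.04-arm" configured, _build_matrix("linux", ...) yields
+    # [{"arch": "x86_64"}, {"arch": "arm64"}], while "windows" always yields
+    # [{"arch": "amd64"}, {"arch": "x86"}].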
+    _matrix = [{"arch": "x86_64"}]
+    if os_kind == "windows":
+        _matrix = [
+            {"arch": "amd64"},
+            {"arch": "x86"},
+        ]
+    elif os_kind == "macos":
+        _matrix.append({"arch": "arm64"})
+    elif os_kind == "linux" and linux_arm_runner:
+        _matrix.append({"arch": "arm64"})
+    return _matrix
 
 
 @ci.command(
@@ -1253,7 +513,7 @@ def get_pr_test_labels(
                         f"The '{slug}' slug exists as a label but not as an available OS."
                     )
                 selected.add(slug)
-                if slug != "all":
+                if slug != "all" and slug in available:
                     available.remove(slug)
                 continue
             test_labels.append(name)
@@ -1402,13 +662,11 @@ def get_testing_releases(
     majors = sorted(
         list(
             {
+                # We aren't testing upgrades from anything before 3006.0
+                # and we don't want to test 3007.? on the 3006.x branch
                 version.major
                 for version in releases
-                # We aren't testing upgrades from anything before
-                # 3006.0 except the latest 3005.x
-                if version.major >= 3005
-                # We don't want to test 3007.? on the 3006.x branch
-                and version.major <= parsed_salt_version.major
+                if version.major > 3005 and version.major <= parsed_salt_version.major
             }
         )
     )[-num_major_versions:]
@@ -1618,3 +876,548 @@ def upload_coverage(ctx: Context, reports_path: pathlib.Path, commit_sha: str =
             time.sleep(sleep_time)
 
     ctx.exit(0)
+
+
+def _os_test_filter(osdef, transport, chunk, arm_runner, requested_slugs):
+    """
+    Filter out some test runs based on OS, transport, and chunk to be run.
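+
+    For example, "tcp" transport runs of the "unit" and "functional" chunks
+    are always skipped, and the remaining "tcp" runs only execute on a small
+    set of representative slugs.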
+    """
+    if osdef.slug not in requested_slugs:
+        return False
+    if transport == "tcp" and chunk in ("unit", "functional"):
+        return False
+    if "macos" in osdef.slug and chunk == "scenarios":
+        return False
+    # Without an arm64 runner we can only skip the arm64 test runs.
+    if osdef.arch == "arm64" and not arm_runner:
+        return False
+    if transport == "tcp" and osdef.slug not in (
+        "rockylinux-9",
+        "rockylinux-9-arm64",
+        "photonos-5",
+        "photonos-5-arm64",
+        "ubuntu-22.04",
+        "ubuntu-22.04-arm64",
+    ):
+        return False
+    return True
+
+
+def _define_testrun(ctx, changed_files, labels, full):
+    # ``labels`` may arrive as (name, ...) tuples from the event payload;
+    # reduce them to plain names for the membership checks below.
+    label_names = [_[0] if isinstance(_, (list, tuple)) else _ for _ in labels]
+    if not changed_files.exists():
+        ctx.error(f"The '{changed_files}' file does not exist.")
+        ctx.error(
+            "FYI, the command 'tools process-changed-files <changed-files-path>' "
+            "needs to run prior to this one."
+        )
+        ctx.exit(1)
+    try:
+        changed_files_contents = json.loads(changed_files.read_text())
+    except Exception as exc:
+        ctx.error(f"Could not load the changed files from '{changed_files}': {exc}")
+        ctx.exit(1)
+
+    # Based on which files changed, or other things like PR labels we can
+    # decide what to run, or even if the full test run should be running on the
+    # pull request, etc...
+    changed_pkg_requirements_files: list[str] = []
+    changed_test_requirements_files: list[str] = []
+    if "pkg_requirements_files" in changed_files_contents:
+        changed_pkg_requirements_files = json.loads(
+            changed_files_contents["pkg_requirements_files"]
+        )
+    if "test_requirements_files" in changed_files_contents:
+        changed_test_requirements_files = json.loads(
+            changed_files_contents["test_requirements_files"]
+        )
+    if full:
+        ctx.info("Full test run chosen")
+        testrun = TestRun(type="full", skip_code_coverage=True)
+    elif changed_pkg_requirements_files or changed_test_requirements_files:
+        ctx.info(
+            "Full test run chosen because there was a change made "
+            "to the requirements files."
+        )
+        testrun = TestRun(type="full", skip_code_coverage=True)
+    elif "test:full" in labels:
+        ctx.info("Full test run chosen because the label `test:full` is set.\n")
+        testrun = TestRun(type="full", skip_code_coverage=True)
+    else:
+        testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
+        testrun = TestRun(
+            type="changed",
+            skip_code_coverage=True,
+            from_filenames=str(
+                testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
+            ),
+        )
+        ctx.info(f"Writing {testrun_changed_files_path.name} ...")
+        selected_changed_files = []
+        for fpath in json.loads(changed_files_contents["testrun_files"]):
+            if fpath.startswith(("tools/", "tasks/")):
+                continue
+            if fpath in ("noxfile.py",):
+                continue
+            if fpath == "tests/conftest.py":
+                # In this particular case, just run the full test suite
+                testrun["type"] = "full"
+                ctx.info(
+                    f"Full test run chosen because there was a change to `{fpath}`."
+                )
+            selected_changed_files.append(fpath)
+        testrun_changed_files_path.write_text("\n".join(sorted(selected_changed_files)))
+        if testrun["type"] == "changed":
+            testrun["selected_tests"] = {
+                "core": False,
+                "slow": False,
+                "fast": True,
+                "flaky": False,
+            }
+            if "test:slow" in labels:
+                ctx.info("Slow tests chosen by `test:slow` label.")
+                testrun["selected_tests"]["slow"] = True
+            if "test:core" in labels:
+                ctx.info("Core tests chosen by `test:core` label.")
+                testrun["selected_tests"]["core"] = True
+            if "test:no-fast" in labels:
+                ctx.info("Fast tests deselected by `test:no-fast` label.")
+                testrun["selected_tests"]["fast"] = False
+            if "test:flaky-jail" in labels:
+                ctx.info("Flaky jailed tests chosen by `test:flaky-jail` label.")
+                testrun["selected_tests"]["flaky"] = True
+    return testrun
+
+
+def _environment_slugs(ctx, slugdef, labels):
+    """
+    Based on a slug definition from our environment and the labels for a PR,
+    return the requested slugs for a testrun.
+
+    Environment slug definitions can be a comma-separated list. An "all" item
+    in the list will include all OS and package slugs.
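+
+    Items prefixed with "+" add a slug to the selection and items prefixed
+    with "-" remove a previously selected one; for example, "all,-windows-2019"
+    requests every slug except "windows-2019".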
+    """
+    if isinstance(slugdef, list):
+        requests = slugdef
+    else:
+        requests = [_.strip().lower() for _ in slugdef.split(",") if _.strip()]
+    label_requests = [
+        _[0].rsplit(":", 1)[1] for _ in labels if _[0].startswith("test:os:")
+    ]
+    all_slugs = []
+    slugs = set()
+    for platform in TEST_SALT_LISTING:
+        for osdef in TEST_SALT_LISTING[platform]:
+            all_slugs.append(osdef.slug)
+    for platform in TEST_SALT_PKG_LISTING:
+        for osdef in TEST_SALT_PKG_LISTING[platform]:
+            all_slugs.append(osdef.slug)
+    if "all" in requests:
+        slugs = set(all_slugs)
+        requests.remove("all")
+    if "all" in label_requests:
+        slugs = set(all_slugs)
+        label_requests.remove("all")
+    for request in requests[:]:
+        if request.startswith("+"):
+            request = request.strip("+")
+            if request not in all_slugs:
+                ctx.warn(f"invalid slug name from environment {request}")
+                continue
+            if request in slugs:
+                ctx.info("slug already requested from environment {request}")
+                continue
+            slugs.add(request)
+        elif request.startswith("-"):
+            request = request.strip("-")
+            if request not in all_slugs:
+                ctx.warn(f"invalid slug name from environment {request}")
+                continue
+            if request in slugs:
+                slugs.remove(request)
+            else:
+                ctx.info("slug from environment was never requested {request}")
+        else:
+            if request not in all_slugs:
+                ctx.warn(f"invalid slug name from environment {request}")
+                continue
+            if request in slugs:
+                ctx.info("slug from environment already requested {request}")
+                continue
+            slugs.add(request)
+
+    for label in label_requests:
+        if label not in all_slugs:
+            ctx.warn(f"invalid slug name from label {label}")
+            continue
+        if label in slugs:
+            ctx.info(f"slug from labels already requested {label}")
+            continue
+        slugs.add(label)
+
+    return list(slugs)
+
+
+@ci.command(
+    name="workflow-config",
+    arguments={
+        "salt_version": {
+            "help": "The version of salt being tested against",
+        },
+        "event_name": {
+            "help": "The name of the GitHub event being processed.",
+        },
+        "skip_tests": {
+            "help": "Skip running the Salt tests",
+        },
+        "skip_pkg_tests": {
+            "help": "Skip running the Salt Package tests",
+        },
+        "skip_pkg_download_tests": {
+            "help": "Skip running the Salt Package download tests",
+        },
+        "changed_files": {
+            "help": (
+                "Path to '.json' file containing the payload of changed files "
+                "from the 'dorny/paths-filter' GitHub action."
+            ),
+        },
+    },
+)
+def workflow_config(
+    ctx: Context,
+    salt_version: str,
+    event_name: str,
+    changed_files: pathlib.Path,
+    skip_tests: bool = False,
+    skip_pkg_tests: bool = False,
+    skip_pkg_download_tests: bool = False,
+):
+    full = False
+    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
+    gh_event: dict[str, Any] = {}
+    config: dict[str, Any] = {}
+    labels: list[tuple[str, str]] = []
+    slugs: list[str] = []
+
+    ctx.info(f"{'==== environment ====':^80s}")
+    ctx.info(f"{pprint.pformat(dict(os.environ))}")
+    ctx.info(f"{'==== end environment ====':^80s}")
+    ctx.info(f"Github event path is {gh_event_path}")
+
+    if gh_event_path is None:
+        config["linux_arm_runner"] = ""
+    else:
+        try:
+            gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
+        except Exception as exc:
+            ctx.error(
+                f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc
+            )
+            ctx.exit(1)
+
+        if "pull_request" in gh_event:
+            pr = gh_event["pull_request"]["number"]
+            labels = _get_pr_test_labels_from_event_payload(gh_event)
+        else:
+            ctx.warn("The 'pull_request' key was not found on the event payload.")
+
+        if gh_event["repository"]["private"]:
+            # Private repositories need arm runner configuration environment
+            # variable.
+            if os.environ.get("LINUX_ARM_RUNNER", "0") in ("0", ""):
+                config["linux_arm_runner"] = ""
+            else:
+                config["linux_arm_runner"] = os.environ["LINUX_ARM_RUNNER"]
+        else:
+            # Public repositories can use github's arm64 runners.
+            config["linux_arm_runner"] = "ubuntu-24.04-arm"
+
+    if event_name != "pull_request" or "test:full" in [_[0] for _ in labels]:
+        full = True
+        requested_slugs = _environment_slugs(
+            ctx,
+            tools.utils.get_cicd_shared_context()["full-testrun-slugs"],
+            labels,
+        )
+    else:
+        requested_slugs = _environment_slugs(
+            ctx,
+            tools.utils.get_cicd_shared_context()["pr-testrun-slugs"],
+            labels,
+        )
+
+    ctx.info(f"{'==== requested slugs ====':^80s}")
+    ctx.info(f"{pprint.pformat(requested_slugs)}")
+    ctx.info(f"{'==== end requested slugs ====':^80s}")
+
+    ctx.info(f"{'==== labels ====':^80s}")
+    ctx.info(f"{pprint.pformat(labels)}")
+    ctx.info(f"{'==== end labels ====':^80s}")
+
+    config["skip_code_coverage"] = True
+    if "test:coverage" in labels:
+        config["skip_code_coverage"] = False
+    else:
+        ctx.info("Skipping code coverage.")
+
+    ctx.info(f"{'==== github event ====':^80s}")
+    ctx.info(f"{pprint.pformat(gh_event)}")
+    ctx.info(f"{'==== end github event ====':^80s}")
+
+    config["testrun"] = _define_testrun(ctx, changed_files, labels, full)
+
+    ctx.info(f"{'==== testrun ====':^80s}")
+    ctx.info(f"{pprint.pformat(config['testrun'])}")
+    ctx.info(f"{'==== testrun ====':^80s}")
+
+    jobs = {
+        "lint": True,
+        "test": True,
+        "test-pkg": True,
+        "test-pkg-download": True,
+        "prepare-release": True,
+        "build-docs": True,
+        "build-source-tarball": True,
+        "build-deps-onedir": True,
+        "build-salt-onedir": True,
+        "build-pkgs": True,
+        "build-deps-ci": True if requested_slugs else False,
+    }
+
+    platforms: list[Literal["linux", "macos", "windows"]] = [
+        "linux",
+        "macos",
+        "windows",
+    ]
+
+    if skip_pkg_download_tests:
+        jobs["test-pkg-download"] = False
+
+    config["jobs"] = jobs
+    config["build-matrix"] = {
+        platform: _build_matrix(platform, config["linux_arm_runner"])
+        for platform in platforms
+    }
+    ctx.info(f"{'==== build matrix ====':^80s}")
+    ctx.info(f"{pprint.pformat(config['build-matrix'])}")
+    ctx.info(f"{'==== end build matrix ====':^80s}")
+    config["artifact-matrix"] = []
+    for platform in platforms:
+        config["artifact-matrix"] += [
+            dict({"platform": platform}, **_) for _ in config["build-matrix"][platform]
+        ]
+    ctx.info(f"{'==== artifact matrix ====':^80s}")
+    ctx.info(f"{pprint.pformat(config['artifact-matrix'])}")
+    ctx.info(f"{'==== end artifact matrix ====':^80s}")
+
+    # Get salt releases.
+    releases = tools.utils.get_salt_releases(ctx)
+    str_releases = [str(version) for version in releases]
+    latest = str_releases[-1]
+
+    # Get testing releases.
+    parsed_salt_version = tools.utils.Version(salt_version)
+    # We want the latest 4 major versions, removing the oldest if this version is a new major
+    num_major_versions = 4
+    if parsed_salt_version.minor == 0:
+        num_major_versions = 3
+    majors = sorted(
+        list(
+            {
+                # We aren't testing upgrades from anything before 3006.0
+                # and we don't want to test 3007.? on the 3006.x branch
+                version.major
+                for version in releases
+                if version.major > 3005 and version.major <= parsed_salt_version.major
+            }
+        )
+    )[-num_major_versions:]
+    testing_releases = []
+    # Append the latest minor for each major
+    for major in majors:
+        minors_of_major = [version for version in releases if version.major == major]
+        testing_releases.append(minors_of_major[-1])
+    str_releases = [str(version) for version in testing_releases]
+    ctx.info(f"str_releases {str_releases}")
+
+    pkg_test_matrix: dict[str, list] = {_: [] for _ in platforms}
+
+    if not config["linux_arm_runner"]:
+        # Filter out linux arm tests because we are on a private repository and
+        # no arm64 runner is defined.
+        TEST_SALT_LISTING["linux"] = list(
+            filter(lambda x: x.arch != "arm64", TEST_SALT_LISTING["linux"])
+        )
+        TEST_SALT_PKG_LISTING["linux"] = list(
+            filter(lambda x: x.arch != "arm64", TEST_SALT_PKG_LISTING["linux"])
+        )
+    if not skip_pkg_tests:
+        for platform in platforms:
+            pkg_test_matrix[platform] = [
+                dict(
+                    {
+                        "tests-chunk": "install",
+                        "version": None,
+                    },
+                    **_.as_dict(),
+                )
+                for _ in TEST_SALT_PKG_LISTING[platform]
+                if _.slug in requested_slugs
+            ]
+        for version in str_releases:
+            for platform in platforms:
+                pkg_test_matrix[platform] += [
+                    dict(
+                        {
+                            "tests-chunk": "upgrade",
+                            "version": version,
+                        },
+                        **_.as_dict(),
+                    )
+                    for _ in TEST_SALT_PKG_LISTING[platform]
+                    if _.slug in requested_slugs
+                ]
+                # Skipping downgrade tests on windows. These tests have never
+                # been run and currently fail. This should be fixed.
+                if platform == "windows":
+                    continue
+                pkg_test_matrix[platform] += [
+                    dict(
+                        {
+                            "tests-chunk": "downgrade",
+                            "version": version,
+                        },
+                        **_.as_dict(),
+                    )
+                    for _ in TEST_SALT_PKG_LISTING[platform]
+                    if _.slug in requested_slugs
+                ]
+    ctx.info(f"{'==== pkg test matrix ====':^80s}")
+    ctx.info(f"{pprint.pformat(pkg_test_matrix)}")
+    ctx.info(f"{'==== end pkg test matrix ====':^80s}")
+
+    # We need to be careful about how many chunks we make. We are limited to
+    # 256 items in a matrix.
+    _splits = {
+        "functional": 4,
+        "integration": 7,
+        "scenarios": 1,
+        "unit": 4,
+    }
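+    # With two transports and 4 + 7 + 1 + 4 = 16 chunk groups, a full run can
+    # contribute up to 32 matrix entries per OS definition before filtering.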
+
+    test_matrix: dict[str, list] = {}
+    if not skip_tests:
+        for platform in platforms:
+            for transport in ("zeromq", "tcp"):
+                for chunk in ("unit", "functional", "integration", "scenarios"):
+                    splits = _splits.get(chunk) or 1
+                    if full and splits > 1:
+                        for split in range(1, splits + 1):
+                            if platform != "linux":
+                                if platform not in test_matrix:
+                                    test_matrix[platform] = []
+                                test_matrix[platform] += [
+                                    dict(
+                                        {
+                                            "transport": transport,
+                                            "tests-chunk": chunk,
+                                            "test-group": split,
+                                            "test-group-count": splits,
+                                        },
+                                        **_.as_dict(),
+                                    )
+                                    for _ in TEST_SALT_LISTING[platform]
+                                    if _os_test_filter(
+                                        _,
+                                        transport,
+                                        chunk,
+                                        config["linux_arm_runner"],
+                                        requested_slugs,
+                                    )
+                                ]
+                            else:
+                                for arch in ["x86_64", "arm64"]:
+                                    if f"{platform}-{arch}" not in test_matrix:
+                                        test_matrix[f"{platform}-{arch}"] = []
+                                    test_matrix[f"{platform}-{arch}"] += [
+                                        dict(
+                                            {
+                                                "transport": transport,
+                                                "tests-chunk": chunk,
+                                                "test-group": split,
+                                                "test-group-count": splits,
+                                            },
+                                            **_.as_dict(),
+                                        )
+                                        for _ in TEST_SALT_LISTING[platform]
+                                        if _os_test_filter(
+                                            _,
+                                            transport,
+                                            chunk,
+                                            config["linux_arm_runner"],
+                                            requested_slugs,
+                                        )
+                                        and _.arch == arch
+                                    ]
+                    else:
+                        if platform != "linux":
+                            if platform not in test_matrix:
+                                test_matrix[platform] = []
+                            test_matrix[platform] += [
+                                dict(
+                                    {"transport": transport, "tests-chunk": chunk},
+                                    **_.as_dict(),
+                                )
+                                for _ in TEST_SALT_LISTING[platform]
+                                if _os_test_filter(
+                                    _,
+                                    transport,
+                                    chunk,
+                                    config["linux_arm_runner"],
+                                    requested_slugs,
+                                )
+                            ]
+                        else:
+                            for arch in ["x86_64", "arm64"]:
+                                if f"{platform}-{arch}" not in test_matrix:
+                                    test_matrix[f"{platform}-{arch}"] = []
+                                test_matrix[f"{platform}-{arch}"] += [
+                                    dict(
+                                        {"transport": transport, "tests-chunk": chunk},
+                                        **_.as_dict(),
+                                    )
+                                    for _ in TEST_SALT_LISTING[platform]
+                                    if _os_test_filter(
+                                        _,
+                                        transport,
+                                        chunk,
+                                        config["linux_arm_runner"],
+                                        requested_slugs,
+                                    )
+                                    and _.arch == arch
+                                ]
+
+    for key in test_matrix:
+        if len(test_matrix[key]) > 256:
+            ctx.warn(
+                f"Number of jobs in the {key} test matrix exceeds 256 "
+                f"({len(test_matrix[key])}); jobs may not run."
+            )
+
+    ctx.info(f"{'==== test matrix ====':^80s}")
+    ctx.info(f"{pprint.pformat(test_matrix)}")
+    ctx.info(f"{'==== end test matrix ====':^80s}")
+    config["pkg-test-matrix"] = pkg_test_matrix
+    config["test-matrix"] = test_matrix
+    ctx.info("Jobs selected are")
+    for x, y in jobs.items():
+        ctx.info(f"{x} = {y}")
+    github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY")
+    if github_step_summary is not None:
+        with open(github_step_summary, "a", encoding="utf-8") as wfh:
+            wfh.write("Selected Jobs:\n")
+            for name, value in sorted(jobs.items()):
+                wfh.write(f" - `{name}`: {value}\n")
+    github_output = os.environ.get("GITHUB_OUTPUT")
+    if github_output is not None:
+        with open(github_output, "a", encoding="utf-8") as wfh:
+            wfh.write(f"config={json.dumps(config)}\n")
+    ctx.exit(0)
diff --git a/tools/docs.py b/tools/docs.py
index 3471ea1e8f2..f62d1aa6a60 100644
--- a/tools/docs.py
+++ b/tools/docs.py
@@ -146,51 +146,6 @@ def html(
             )
 
 
-@docs.command(
-    name="epub",
-    arguments={
-        "no_clean": {
-            "help": "Don't cleanup prior to building",
-        },
-        "no_color": {
-            "help": "Disable colored output.",
-        },
-    },
-)
-def epub(ctx: Context, no_clean: bool = False, no_color: bool = False):
-    if no_clean is False:
-        ctx.run("make", "clean", cwd="doc/", check=True)
-    opts = [
-        "-j",
-        "auto",
-        "--keep-going",
-    ]
-    if no_color is False:
-        opts.append("--color")
-    ctx.run(
-        "make",
-        "epub",
-        f"SPHINXOPTS={' '.join(opts)}",
-        cwd="doc/",
-        check=True,
-    )
-
-    artifact = tools.utils.REPO_ROOT / "doc" / "_build" / "epub" / "Salt.epub"
-    if "LATEST_RELEASE" in os.environ:
-        shutil.move(
-            artifact, artifact.parent / f"Salt-{os.environ['LATEST_RELEASE']}.epub"
-        )
-        artifact = artifact.parent / f"Salt-{os.environ['LATEST_RELEASE']}.epub"
-    github_output = os.environ.get("GITHUB_OUTPUT")
-    if github_output is not None:
-        with open(github_output, "a", encoding="utf-8") as wfh:
-            wfh.write(
-                "has-artifacts=true\n"
-                f"artifact-name={artifact.resolve().name}\n"
-                f"artifact-path={artifact.resolve()}\n"
-            )
-
-
 @docs.command(
     name="pdf",
     arguments={
diff --git a/tools/precommit/docstrings.py b/tools/precommit/docstrings.py
index fa1d435833b..52d6bd7f961 100644
--- a/tools/precommit/docstrings.py
+++ b/tools/precommit/docstrings.py
@@ -345,7 +345,6 @@ MISSING_DOCSTRINGS = {
         "get_pillars",
         "expand_variables",
         "render_jinja",
-        "expand_classes_in_order",
         "dict_search_and_replace",
         "expanded_dict_from_minion",
         "find_value_to_expand",
diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py
index bd1fdca76df..d4884b5da3f 100644
--- a/tools/precommit/workflows.py
+++ b/tools/precommit/workflows.py
@@ -13,120 +13,21 @@ from jinja2 import Environment, FileSystemLoader, StrictUndefined
 from ptscripts import Context, command_group
 
 import tools.utils
-from tools.utils import Linux, MacOS, PlatformDefinitions, Windows
+from tools.utils import (
+    Linux,
+    LinuxPkg,
+    MacOS,
+    MacOSPkg,
+    PlatformDefinitions,
+    Windows,
+    WindowsPkg,
+)
 
 log = logging.getLogger(__name__)
 
 WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows"
 TEMPLATES = WORKFLOWS / "templates"
 
-TEST_SALT_LISTING = PlatformDefinitions(
-    {
-        "linux": [
-            Linux(slug="rockylinux-8", display_name="Rocky Linux 8", arch="x86_64"),
-            Linux(
-                slug="rockylinux-8-arm64",
-                display_name="Rocky Linux 8 Arm64",
-                arch="arm64",
-            ),
-            Linux(slug="rockylinux-9", display_name="Rocky Linux 9", arch="x86_64"),
-            Linux(
-                slug="rockylinux-9-arm64",
-                display_name="Rocky Linux 9 Arm64",
-                arch="arm64",
-            ),
-            Linux(slug="amazonlinux-2", display_name="Amazon Linux 2", arch="x86_64"),
-            Linux(
-                slug="amazonlinux-2-arm64",
-                display_name="Amazon Linux 2 Arm64",
-                arch="arm64",
-            ),
-            Linux(
-                slug="amazonlinux-2023",
-                display_name="Amazon Linux 2023",
-                arch="x86_64",
-            ),
-            Linux(
-                slug="amazonlinux-2023-arm64",
-                display_name="Amazon Linux 2023 Arm64",
-                arch="arm64",
-            ),
-            Linux(slug="archlinux-lts", display_name="Arch Linux LTS", arch="x86_64"),
-            Linux(slug="debian-11", display_name="Debian 11", arch="x86_64"),
-            Linux(slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64"),
-            Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"),
-            Linux(slug="debian-12-arm64", display_name="Debian 12 Arm64", arch="arm64"),
-            Linux(slug="fedora-40", display_name="Fedora 40", arch="x86_64"),
-            Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"),
-            Linux(slug="photonos-4", display_name="Photon OS 4", arch="x86_64"),
-            Linux(
-                slug="photonos-4-arm64", display_name="Photon OS 4 Arm64", arch="arm64"
-            ),
-            Linux(
-                slug="photonos-4",
-                display_name="Photon OS 4",
-                arch="x86_64",
-                fips=True,
-            ),
-            Linux(
-                slug="photonos-4-arm64",
-                display_name="Photon OS 4 Arm64",
-                arch="arm64",
-                fips=True,
-            ),
-            Linux(slug="photonos-5", display_name="Photon OS 5", arch="x86_64"),
-            Linux(
-                slug="photonos-5-arm64", display_name="Photon OS 5 Arm64", arch="arm64"
-            ),
-            Linux(
-                slug="photonos-5",
-                display_name="Photon OS 5",
-                arch="x86_64",
-                fips=True,
-            ),
-            Linux(
-                slug="photonos-5-arm64",
-                display_name="Photon OS 5 Arm64",
-                arch="arm64",
-                fips=True,
-            ),
-            Linux(slug="ubuntu-20.04", display_name="Ubuntu 20.04", arch="x86_64"),
-            Linux(
-                slug="ubuntu-20.04-arm64",
-                display_name="Ubuntu 20.04 Arm64",
-                arch="arm64",
-            ),
-            Linux(slug="ubuntu-22.04", display_name="Ubuntu 22.04", arch="x86_64"),
-            Linux(
-                slug="ubuntu-22.04-arm64",
-                display_name="Ubuntu 22.04 Arm64",
-                arch="arm64",
-            ),
-            Linux(slug="ubuntu-24.04", display_name="Ubuntu 24.04", arch="x86_64"),
-            Linux(
-                slug="ubuntu-24.04-arm64",
-                display_name="Ubuntu 24.04 Arm64",
-                arch="arm64",
-            ),
-        ],
-        "macos": [
-            MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"),
-            MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"),
-            MacOS(
-                slug="macos-13-arm64",
-                display_name="macOS 13 Arm64",
-                arch="arm64",
-                runner="macos-13-xlarge",
-            ),
-        ],
-        "windows": [
-            Windows(slug="windows-2016", display_name="Windows 2016", arch="amd64"),
-            Windows(slug="windows-2019", display_name="Windows 2019", arch="amd64"),
-            Windows(slug="windows-2022", display_name="Windows 2022", arch="amd64"),
-        ],
-    }
-)
-
 # Define the command group
 cgroup = command_group(
     name="workflows",
@@ -135,6 +36,414 @@ cgroup = command_group(
     parent="pre-commit",
 )
 
+# Testing platforms
+TEST_SALT_LISTING = PlatformDefinitions(
+    {
+        "linux": [
+            Linux(
+                slug="rockylinux-8",
+                display_name="Rocky Linux 8",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-8",
+            ),
+            Linux(
+                slug="rockylinux-8-arm64",
+                display_name="Rocky Linux 8 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-8",
+            ),
+            Linux(
+                slug="rockylinux-9",
+                display_name="Rocky Linux 9",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-9",
+            ),
+            Linux(
+                slug="rockylinux-9-arm64",
+                display_name="Rocky Linux 9 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-9",
+            ),
+            Linux(
+                slug="amazonlinux-2",
+                display_name="Amazon Linux 2",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2",
+            ),
+            Linux(
+                slug="amazonlinux-2-arm64",
+                display_name="Amazon Linux 2 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2",
+            ),
+            Linux(
+                slug="amazonlinux-2023",
+                display_name="Amazon Linux 2023",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2023",
+            ),
+            Linux(
+                slug="amazonlinux-2023-arm64",
+                display_name="Amazon Linux 2023 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2023",
+            ),
+            Linux(
+                slug="debian-11",
+                display_name="Debian 11",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-11",
+            ),
+            Linux(
+                slug="debian-11-arm64",
+                display_name="Debian 11 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-11",
+            ),
+            Linux(
+                slug="debian-12",
+                display_name="Debian 12",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-12",
+            ),
+            Linux(
+                slug="debian-12-arm64",
+                display_name="Debian 12 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-12",
+            ),
+            Linux(
+                slug="fedora-40",
+                display_name="Fedora 40",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:fedora-40",
+            ),
+            # Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"),
+            Linux(
+                slug="photonos-4",
+                display_name="Photon OS 4",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            Linux(
+                slug="photonos-4-arm64",
+                display_name="Photon OS 4 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            Linux(
+                slug="photonos-4",
+                display_name="Photon OS 4",
+                arch="x86_64",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            Linux(
+                slug="photonos-4-arm64",
+                display_name="Photon OS 4 Arm64",
+                arch="arm64",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            Linux(
+                slug="photonos-5",
+                display_name="Photon OS 5",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            Linux(
+                slug="photonos-5-arm64",
+                display_name="Photon OS 5 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            Linux(
+                slug="photonos-5",
+                display_name="Photon OS 5",
+                arch="x86_64",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            Linux(
+                slug="photonos-5-arm64",
+                display_name="Photon OS 5 Arm64",
+                arch="arm64",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            Linux(
+                slug="ubuntu-20.04",
+                display_name="Ubuntu 20.04",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-20.04",
+            ),
+            Linux(
+                slug="ubuntu-20.04-arm64",
+                display_name="Ubuntu 20.04 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-20.04",
+            ),
+            Linux(
+                slug="ubuntu-22.04",
+                display_name="Ubuntu 22.04",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04",
+            ),
+            Linux(
+                slug="ubuntu-22.04-arm64",
+                display_name="Ubuntu 22.04 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04",
+            ),
+            Linux(
+                slug="ubuntu-24.04",
+                display_name="Ubuntu 24.04",
+                arch="x86_64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-24.04",
+            ),
+            Linux(
+                slug="ubuntu-24.04-arm64",
+                display_name="Ubuntu 24.04 Arm64",
+                arch="arm64",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-24.04",
+            ),
+        ],
+        "macos": [
+            MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"),
+            MacOS(slug="macos-14", display_name="macOS 14 (M1)", arch="arm64"),
+            MacOS(slug="macos-15", display_name="macOS 15 (M1)", arch="arm64"),
+        ],
+        "windows": [
+            # Windows(slug="windows-2016", display_name="Windows 2016", arch="amd64"),
+            Windows(slug="windows-2019", display_name="Windows 2019", arch="amd64"),
+            Windows(slug="windows-2022", display_name="Windows 2022", arch="amd64"),
+        ],
+    }
+)
+TEST_SALT_PKG_LISTING = PlatformDefinitions(
+    {
+        "linux": [
+            LinuxPkg(
+                slug="rockylinux-8",
+                display_name="Rocky Linux 8",
+                arch="x86_64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-8",
+            ),
+            LinuxPkg(
+                slug="rockylinux-8-arm64",
+                display_name="Rocky Linux 8 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-8",
+            ),
+            LinuxPkg(
+                slug="rockylinux-9",
+                display_name="Rocky Linux 9",
+                arch="x86_64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-9",
+            ),
+            LinuxPkg(
+                slug="rockylinux-9-arm64",
+                display_name="Rocky Linux 9 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:rockylinux-9",
+            ),
+            LinuxPkg(
+                slug="amazonlinux-2",
+                display_name="Amazon Linux 2",
+                arch="x86_64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2",
+            ),
+            LinuxPkg(
+                slug="amazonlinux-2-arm64",
+                display_name="Amazon Linux 2 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2",
+            ),
+            LinuxPkg(
+                slug="amazonlinux-2023",
+                display_name="Amazon Linux 2023",
+                arch="x86_64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2023",
+            ),
+            LinuxPkg(
+                slug="amazonlinux-2023-arm64",
+                display_name="Amazon Linux 2023 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:amazonlinux-2023",
+            ),
+            LinuxPkg(
+                slug="debian-11",
+                display_name="Debian 11",
+                arch="x86_64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-11",
+            ),
+            LinuxPkg(
+                slug="debian-11-arm64",
+                display_name="Debian 11 Arm64",
+                arch="arm64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-11",
+            ),
+            LinuxPkg(
+                slug="debian-12",
+                display_name="Debian 12",
+                arch="x86_64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-12",
+            ),
+            LinuxPkg(
+                slug="debian-12-arm64",
+                display_name="Debian 12 Arm64",
+                arch="arm64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:debian-12",
+            ),
+            LinuxPkg(
+                slug="photonos-4",
+                display_name="Photon OS 4",
+                arch="x86_64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            LinuxPkg(
+                slug="photonos-4-arm64",
+                display_name="Photon OS 4 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            LinuxPkg(
+                slug="photonos-4",
+                display_name="Photon OS 4",
+                arch="x86_64",
+                pkg_type="rpm",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            LinuxPkg(
+                slug="photonos-4-arm64",
+                display_name="Photon OS 4 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-4",
+            ),
+            LinuxPkg(
+                slug="photonos-5",
+                display_name="Photon OS 5",
+                arch="x86_64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            LinuxPkg(
+                slug="photonos-5-arm64",
+                display_name="Photon OS 5 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            LinuxPkg(
+                slug="photonos-5",
+                display_name="Photon OS 5",
+                arch="x86_64",
+                pkg_type="rpm",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            LinuxPkg(
+                slug="photonos-5-arm64",
+                display_name="Photon OS 5 Arm64",
+                arch="arm64",
+                pkg_type="rpm",
+                fips=True,
+                container="ghcr.io/saltstack/salt-ci-containers/testing:photon-5",
+            ),
+            LinuxPkg(
+                slug="ubuntu-20.04",
+                display_name="Ubuntu 20.04",
+                arch="x86_64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-20.04",
+            ),
+            LinuxPkg(
+                slug="ubuntu-20.04-arm64",
+                display_name="Ubuntu 20.04 Arm64",
+                arch="arm64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-20.04",
+            ),
+            LinuxPkg(
+                slug="ubuntu-22.04",
+                display_name="Ubuntu 22.04",
+                arch="x86_64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04",
+            ),
+            LinuxPkg(
+                slug="ubuntu-22.04-arm64",
+                display_name="Ubuntu 22.04 Arm64",
+                arch="arm64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04",
+            ),
+            LinuxPkg(
+                slug="ubuntu-24.04",
+                display_name="Ubuntu 24.04",
+                arch="x86_64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-24.04",
+            ),
+            LinuxPkg(
+                slug="ubuntu-24.04-arm64",
+                display_name="Ubuntu 24.04 Arm64",
+                arch="arm64",
+                pkg_type="deb",
+                container="ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-24.04",
+            ),
+        ],
+        "macos": [
+            MacOSPkg(slug="macos-13", display_name="macOS 13", arch="x86_64"),
+            MacOSPkg(slug="macos-14", display_name="macOS 14 (M1)", arch="arm64"),
+            MacOSPkg(slug="macos-15", display_name="macOS 15 (M1)", arch="arm64"),
+        ],
+        "windows": [
+            WindowsPkg(
+                slug="windows-2019",
+                display_name="Windows 2019",
+                arch="amd64",
+                pkg_type="NSIS",
+            ),
+            WindowsPkg(
+                slug="windows-2019",
+                display_name="Windows 2019",
+                arch="amd64",
+                pkg_type="MSI",
+            ),
+            WindowsPkg(
+                slug="windows-2022",
+                display_name="Windows 2022",
+                arch="amd64",
+                pkg_type="NSIS",
+            ),
+            WindowsPkg(
+                slug="windows-2022",
+                display_name="Windows 2022",
+                arch="amd64",
+                pkg_type="MSI",
+            ),
+        ],
+    }
+)
+
 
 class NeedsTracker:
     def __init__(self):
@@ -192,221 +501,7 @@ def generate_workflows(ctx: Context):
             },
         },
     }
-
-    test_salt_pkg_listing = PlatformDefinitions(
-        {
-            "linux": [
-                Linux(
-                    slug="rockylinux-8",
-                    display_name="Rocky Linux 8",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="rockylinux-8-arm64",
-                    display_name="Rocky Linux 8 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="rockylinux-9",
-                    display_name="Rocky Linux 9",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="rockylinux-9-arm64",
-                    display_name="Rocky Linux 9 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="amazonlinux-2",
-                    display_name="Amazon Linux 2",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="amazonlinux-2-arm64",
-                    display_name="Amazon Linux 2 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="amazonlinux-2023",
-                    display_name="Amazon Linux 2023",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="amazonlinux-2023-arm64",
-                    display_name="Amazon Linux 2023 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="debian-11",
-                    display_name="Debian 11",
-                    arch="x86_64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="debian-11-arm64",
-                    display_name="Debian 11 Arm64",
-                    arch="arm64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="debian-12",
-                    display_name="Debian 12",
-                    arch="x86_64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="debian-12-arm64",
-                    display_name="Debian 12 Arm64",
-                    arch="arm64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="photonos-4",
-                    display_name="Photon OS 4",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="photonos-4-arm64",
-                    display_name="Photon OS 4 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="photonos-4",
-                    display_name="Photon OS 4",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                    fips=True,
-                ),
-                Linux(
-                    slug="photonos-4-arm64",
-                    display_name="Photon OS 4 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                    fips=True,
-                ),
-                Linux(
-                    slug="photonos-5",
-                    display_name="Photon OS 5",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="photonos-5-arm64",
-                    display_name="Photon OS 5 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                ),
-                Linux(
-                    slug="photonos-5",
-                    display_name="Photon OS 5",
-                    arch="x86_64",
-                    pkg_type="rpm",
-                    fips=True,
-                ),
-                Linux(
-                    slug="photonos-5-arm64",
-                    display_name="Photon OS 5 Arm64",
-                    arch="arm64",
-                    pkg_type="rpm",
-                    fips=True,
-                ),
-                Linux(
-                    slug="ubuntu-20.04",
-                    display_name="Ubuntu 20.04",
-                    arch="x86_64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="ubuntu-20.04-arm64",
-                    display_name="Ubuntu 20.04 Arm64",
-                    arch="arm64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="ubuntu-22.04",
-                    display_name="Ubuntu 22.04",
-                    arch="x86_64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="ubuntu-22.04-arm64",
-                    display_name="Ubuntu 22.04 Arm64",
-                    arch="arm64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="ubuntu-24.04",
-                    display_name="Ubuntu 24.04",
-                    arch="x86_64",
-                    pkg_type="deb",
-                ),
-                Linux(
-                    slug="ubuntu-24.04-arm64",
-                    display_name="Ubuntu 24.04 Arm64",
-                    arch="arm64",
-                    pkg_type="deb",
-                ),
-            ],
-            "macos": [
-                MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"),
-                MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"),
-                MacOS(
-                    slug="macos-13-arm64",
-                    display_name="macOS 13 Arm64",
-                    arch="arm64",
-                    runner="macos-13-xlarge",
-                ),
-            ],
-            "windows": [
-                Windows(
-                    slug="windows-2016",
-                    display_name="Windows 2016",
-                    arch="amd64",
-                    pkg_type="NSIS",
-                ),
-                Windows(
-                    slug="windows-2016",
-                    display_name="Windows 2016",
-                    arch="amd64",
-                    pkg_type="MSI",
-                ),
-                Windows(
-                    slug="windows-2019",
-                    display_name="Windows 2019",
-                    arch="amd64",
-                    pkg_type="NSIS",
-                ),
-                Windows(
-                    slug="windows-2019",
-                    display_name="Windows 2019",
-                    arch="amd64",
-                    pkg_type="MSI",
-                ),
-                Windows(
-                    slug="windows-2022",
-                    display_name="Windows 2022",
-                    arch="amd64",
-                    pkg_type="NSIS",
-                ),
-                Windows(
-                    slug="windows-2022",
-                    display_name="Windows 2022",
-                    arch="amd64",
-                    pkg_type="MSI",
-                ),
-            ],
-        }
-    )
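+    # The platform matrix formerly hard-coded above now comes from the shared
+    # TEST_SALT_PKG_LISTING constant.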
+    test_salt_pkg_listing = TEST_SALT_PKG_LISTING
 
     build_rpms_listing = []
     rpm_os_versions: dict[str, list[str]] = {
@@ -475,6 +570,9 @@ def generate_workflows(ctx: Context):
             "includes": includes,
             "conclusion_needs": NeedsTracker(),
             "test_salt_needs": NeedsTracker(),
+            "test_salt_linux_needs": NeedsTracker(),
+            "test_salt_macos_needs": NeedsTracker(),
+            "test_salt_windows_needs": NeedsTracker(),
             "test_salt_pkg_needs": NeedsTracker(),
             "test_repo_needs": NeedsTracker(),
             "prepare_workflow_needs": NeedsTracker(),
diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py
index 6f79b4c4493..c11677b0079 100644
--- a/tools/utils/__init__.py
+++ b/tools/utils/__init__.py
@@ -70,6 +70,31 @@ class OS:
 class Linux(OS):
     platform: str = attr.ib(default="linux")
     fips: bool = attr.ib(default=False)
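+    # Optional container image for this platform's test runs; None presumably
+    # means the tests run directly on the VM.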
+    container: str | None = attr.ib(default=None)
+
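+    # Workflow job name derived from the slug: dots are stripped and "-fips"
+    # is appended for FIPS runs.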
+    @property
+    def job_name(self):
+        return f"test-{ self.slug.replace('.', '') }{'-fips' if self.fips else ''}"
+
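+    # Flatten the platform definition, including the computed job_name, for
+    # consumption by the generated workflows.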
+    def as_dict(self):
+        return {
+            "platform": self.platform,
+            "slug": self.slug,
+            "arch": self.arch,
+            "display_name": self.display_name,
+            "pkg_type": self.pkg_type,
+            "fips": self.fips,
+            "container": self.container,
+            "job_name": self.job_name,
+        }
+
+
+@attr.s(frozen=True, slots=True)
+class LinuxPkg(Linux):
+
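+    # Package-test variant of Linux: identical data, but job names carry a
+    # "test-pkg-" prefix.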
+    @property
+    def job_name(self):
+        return f"test-pkg-{ self.slug.replace('.', '') }{ '-fips' if self.fips else ''}"
 
 
 @attr.s(frozen=True, slots=True)
@@ -81,6 +106,29 @@ class MacOS(OS):
     def _default_runner(self):
         return self.slug
 
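+    # Workflow job name derived from the slug with dots stripped.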
+    @property
+    def job_name(self):
+        return f"test-{ self.slug.replace('.', '') }"
+
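+    # Like Linux.as_dict, but also carries the runner label used for macOS jobs.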
+    def as_dict(self):
+        return {
+            "platform": self.platform,
+            "slug": self.slug,
+            "arch": self.arch,
+            "display_name": self.display_name,
+            "pkg_type": self.pkg_type,
+            "runner": self.runner,
+            "job_name": self.job_name,
+        }
+
+
+@attr.s(frozen=True, slots=True)
+class MacOSPkg(MacOS):
+
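+    # Package-test variant of MacOS with a "test-pkg-" job-name prefix.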
+    @property
+    def job_name(self):
+        return f"test-pkg-{ self.slug.replace('.', '') }"
+
 
 @attr.s(frozen=True, slots=True)
 class Windows(OS):
@@ -89,6 +137,28 @@ class Windows(OS):
     def _get_default_arch(self):
         return "amd64"
 
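+    # Workflow job name derived from the slug, mirroring the Linux and macOS
+    # variants.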
+    @property
+    def job_name(self):
+        return f"test-{ self.slug.replace('.', '') }"
+
+    def as_dict(self):
+        return {
+            "platform": self.platform,
+            "slug": self.slug,
+            "arch": self.arch,
+            "display_name": self.display_name,
+            "pkg_type": self.pkg_type,
+            "job_name": self.job_name,
+        }
+
+
+@attr.s(frozen=True, slots=True)
+class WindowsPkg(Windows):
+
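+    # Windows package tests run once per installer type, so the pkg_type
+    # (nsis/msi) is folded into the job name.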
+    @property
+    def job_name(self):
+        return f"test-pkg-{ self.slug.replace('.', '') }-{ self.pkg_type.lower() }"
+
 
 class PlatformDefinitions(TypedDict):
     linux: list[Linux]
@@ -186,7 +256,9 @@ class Version(packaging.version.Version):
         return hash(str(self))
 
 
-def get_salt_releases(ctx: Context, repository: str) -> list[Version]:
+def get_salt_releases(
+    ctx: Context, repository: str = "saltstack/salt"
+) -> list[Version]:
     """
     Return a list of salt versions
     """
diff --git a/tools/vm.py b/tools/vm.py
index 13103eda912..b520a9dc265 100644
--- a/tools/vm.py
+++ b/tools/vm.py
@@ -331,10 +331,14 @@ def test(
         env["PRINT_TEST_SELECTION"] = "1"
     else:
         env["PRINT_TEST_SELECTION"] = "0"
-    if skip_code_coverage:
-        env["SKIP_CODE_COVERAGE"] = "1"
-    else:
-        env["SKIP_CODE_COVERAGE"] = "0"
+
+    # Code coverage is skipped unconditionally for now; restore the toggle below to re-enable it.
+    # if skip_code_coverage:
+    #     env["SKIP_CODE_COVERAGE"] = "1"
+    # else:
+    #     env["SKIP_CODE_COVERAGE"] = "0"
+    env["SKIP_CODE_COVERAGE"] = "1"
+
     if print_system_info:
         env["PRINT_SYSTEM_INFO"] = "1"
     else: