Merge pull request #66233 from s0undt3ch/hotfix/merge-forward-into-3007.x

[3007.x] Merge 3006.x into 3007.x
Pedro Algarvio 2024-04-04 03:52:36 +01:00 committed by GitHub
commit f527459022
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
146 changed files with 3698 additions and 1331 deletions

View file

@ -1,7 +1,7 @@
### What does this PR do?
### What issues does this PR fix or reference?
Fixes:
Fixes
### Previous Behavior
Remove this section if not relevant

View file

@ -163,7 +163,7 @@ jobs:
macos-dependencies:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
timeout-minutes: 90
strategy:
fail-fast: false
@ -171,7 +171,7 @@ jobs:
include:
- distro-slug: macos-12
arch: x86_64
- distro-slug: macos-13-xlarge
- distro-slug: macos-13-arm64
arch: arm64
steps:
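The rewritten runs-on line above uses the GitHub Actions "condition && value-if-true || value-if-false" idiom: the matrix keeps the neutral macos-13-arm64 slug, and only at scheduling time is it mapped onto the macos-13-xlarge runner label that GitHub actually provides, while every other slug falls through unchanged. A minimal sketch of the same pattern (the job name and echo step are illustrative, not taken from this change):

jobs:
  macos-dependencies-demo:
    strategy:
      matrix:
        include:
          - distro-slug: macos-12        # passes through as-is
          - distro-slug: macos-13-arm64  # remapped below
    # ternary-style expression: use macos-13-xlarge for the arm64 slug,
    # otherwise fall back to the slug itself
    runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
    steps:
      - run: echo "running ${{ matrix.distro-slug }} on ${{ runner.arch }}"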

View file

@ -65,7 +65,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-deps-linux-${{ matrix.arch }}
cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }}
- name: Setup Relenv
id: setup-relenv
@ -116,7 +116,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-deps-macos
cache-prefix: ${{ inputs.cache-seed }}|build-deps|macos
- name: Setup Relenv
id: setup-relenv
@ -166,7 +166,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-deps-windows-${{ matrix.arch }}
cache-prefix: ${{ inputs.cache-seed }}|build-deps|windows|${{ matrix.arch }}
- name: Setup Relenv
id: setup-relenv
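Across these build-deps hunks the cache-prefix switches from dash-joined to pipe-joined segments, matching the seed|segment|segment shape already used elsewhere in these workflows (see the |3.10.13 prefixes further down) and keeping the seed, the purpose, the platform, and the arch unambiguously separated even though the seed and arch values can themselves contain dashes. An illustration with a made-up seed value (the seed shown is an assumption, not taken from this change):

# inputs.cache-seed = seed-20240404, matrix.arch = x86_64
# old prefix: seed-20240404-build-deps-linux-x86_64
# new prefix: seed-20240404|build-deps|linux|x86_64
- name: Setup Python Tools Scripts
  uses: ./.github/actions/setup-python-tools-scripts
  with:
    cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }}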

View file

@ -65,7 +65,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-windows
cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|linux
- name: Setup Salt Version
id: setup-salt-version
@ -130,7 +130,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos
cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|macos
- name: Setup Salt Version
id: setup-salt-version
@ -185,7 +185,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos
cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|windows
- name: Setup Salt Version
id: setup-salt-version

View file

@ -43,7 +43,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -60,7 +61,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
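Two prepare-workflow changes recur across the CI workflow files touched by this merge: the single pull-labels output is split into os-labels (used to gate the per-distro jobs further down) and a pull-labels output now fed from test-labels, and the changed-files detection moves from dorny/paths-filter v2 to v3, with the inputs passed to the action unchanged in these hunks. For orientation, a minimal stand-alone use of that action looks roughly like this (the filter names and paths are illustrative, not the ones this repository uses):

- name: Get Changed Files
  id: changed-files
  uses: dorny/paths-filter@v3
  with:
    token: ${{ github.token }}
    list-files: json                  # expose matched files as JSON lists
    filters: |
      workflows:
        - '.github/workflows/**'
      salt:
        - 'salt/**'
- name: React To Workflow Changes
  if: steps.changed-files.outputs.workflows == 'true'
  run: |
    echo '${{ steps.changed-files.outputs.workflows_files }}' | jq -C '.'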
@ -198,19 +199,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -230,10 +223,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@ -509,16 +498,16 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
almalinux-8-pkg-tests:
name: Alma Linux 8 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
rockylinux-8-pkg-tests:
name: Rocky Linux 8 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
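This hunk shows the new gating shape applied to the per-distro jobs throughout the workflow: in addition to the existing test-pkg and self-hosted checks, the job's if: now also requires the os-labels output of prepare-workflow to contain the job's distro slug, so on pull requests the distro only runs when the matching OS label is present. Annotated, the condition from the rockylinux-8 job above reads:

rockylinux-8-pkg-tests:
  name: Rocky Linux 8 Package Test
  # 1) the test-pkg job group is enabled for this run,
  # 2) self-hosted runners are available,
  # 3) the PR carries the rockylinux-8 OS label (the JSON os-labels output is
  #    parsed with fromJSON and checked with contains)
  if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }}
  needs:
    - prepare-workflow
    - build-pkgs-onedir
    - build-ci-deps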
@ -530,16 +519,16 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-8-arm64-pkg-tests:
name: Alma Linux 8 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
rockylinux-8-arm64-pkg-tests:
name: Rocky Linux 8 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -551,8 +540,8 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-pkg-tests:
name: Alma Linux 9 Package Test
rockylinux-9-pkg-tests:
name: Rocky Linux 9 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -560,7 +549,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -572,16 +561,16 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-arm64-pkg-tests:
name: Alma Linux 9 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
rockylinux-9-arm64-pkg-tests:
name: Rocky Linux 9 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-9-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -595,7 +584,7 @@ jobs:
amazonlinux-2-pkg-tests:
name: Amazon Linux 2 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -616,7 +605,7 @@ jobs:
amazonlinux-2-arm64-pkg-tests:
name: Amazon Linux 2 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -637,7 +626,7 @@ jobs:
amazonlinux-2023-pkg-tests:
name: Amazon Linux 2023 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -679,7 +668,7 @@ jobs:
centos-7-pkg-tests:
name: CentOS 7 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -700,7 +689,7 @@ jobs:
debian-10-pkg-tests:
name: Debian 10 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-10') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -721,7 +710,7 @@ jobs:
debian-11-pkg-tests:
name: Debian 11 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -742,7 +731,7 @@ jobs:
debian-11-arm64-pkg-tests:
name: Debian 11 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -763,7 +752,7 @@ jobs:
debian-12-pkg-tests:
name: Debian 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -784,7 +773,7 @@ jobs:
debian-12-arm64-pkg-tests:
name: Debian 12 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -805,7 +794,7 @@ jobs:
photonos-4-pkg-tests:
name: Photon OS 4 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -827,7 +816,7 @@ jobs:
photonos-4-arm64-pkg-tests:
name: Photon OS 4 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -849,7 +838,7 @@ jobs:
photonos-5-pkg-tests:
name: Photon OS 5 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -871,7 +860,7 @@ jobs:
photonos-5-arm64-pkg-tests:
name: Photon OS 5 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -893,7 +882,7 @@ jobs:
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -914,7 +903,7 @@ jobs:
ubuntu-2004-arm64-pkg-tests:
name: Ubuntu 20.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -935,7 +924,7 @@ jobs:
ubuntu-2204-pkg-tests:
name: Ubuntu 22.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -977,7 +966,7 @@ jobs:
macos-12-pkg-tests:
name: macOS 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-12') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -985,6 +974,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -998,7 +988,7 @@ jobs:
macos-13-pkg-tests:
name: macOS 13 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -1006,6 +996,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1017,7 +1008,7 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1026,7 +1017,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
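The macOS test and package-test jobs now pass two values where they previously passed one: distro-slug stays the logical name (macos-12, macos-13, macos-13-arm64) used for job naming and artifacts, while the new runner input carries the actual GitHub-hosted runner label, which for the arm64 slug is still macos-13-xlarge. The receiving side of that input is not part of the hunks shown here; a plausible shape for it in the reusable macOS workflows such as test-packages-action-macos.yml would be (an assumption for illustration, not the actual file contents):

# assumed shape of the called workflow (not shown in this diff)
on:
  workflow_call:
    inputs:
      distro-slug:
        type: string
        required: true
      runner:                          # new: the concrete runner label to schedule on
        type: string
        required: true

jobs:
  test:
    runs-on: ${{ inputs.runner }}      # e.g. macos-13-xlarge for the macos-13-arm64 slug
    steps:
      - run: echo "testing ${{ inputs.distro-slug }} on ${{ inputs.runner }}"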
@ -1040,7 +1032,7 @@ jobs:
windows-2016-nsis-pkg-tests:
name: Windows 2016 NSIS Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -1061,7 +1053,7 @@ jobs:
windows-2016-msi-pkg-tests:
name: Windows 2016 MSI Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -1082,7 +1074,7 @@ jobs:
windows-2019-nsis-pkg-tests:
name: Windows 2019 NSIS Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -1103,7 +1095,7 @@ jobs:
windows-2019-msi-pkg-tests:
name: Windows 2019 MSI Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -1166,7 +1158,7 @@ jobs:
windows-2016:
name: Windows 2016 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1187,7 +1179,7 @@ jobs:
windows-2019:
name: Windows 2019 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1229,13 +1221,14 @@ jobs:
macos-12:
name: macOS 12 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-12') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1250,13 +1243,14 @@ jobs:
macos-13:
name: macOS 13 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1269,7 +1263,7 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1277,7 +1271,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1290,15 +1285,15 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8:
name: Alma Linux 8 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
rockylinux-8:
name: Rocky Linux 8 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1311,15 +1306,15 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8-arm64:
name: Alma Linux 8 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
rockylinux-8-arm64:
name: Rocky Linux 8 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1332,15 +1327,15 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9:
name: Alma Linux 9 Test
rockylinux-9:
name: Rocky Linux 9 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1353,15 +1348,15 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9-arm64:
name: Alma Linux 9 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
rockylinux-9-arm64:
name: Rocky Linux 9 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-9-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1376,7 +1371,7 @@ jobs:
amazonlinux-2:
name: Amazon Linux 2 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1397,7 +1392,7 @@ jobs:
amazonlinux-2-arm64:
name: Amazon Linux 2 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1418,7 +1413,7 @@ jobs:
amazonlinux-2023:
name: Amazon Linux 2023 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1481,7 +1476,7 @@ jobs:
centos-7:
name: CentOS 7 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1502,7 +1497,7 @@ jobs:
debian-10:
name: Debian 10 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-10') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1523,7 +1518,7 @@ jobs:
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1544,7 +1539,7 @@ jobs:
debian-11-arm64:
name: Debian 11 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1565,7 +1560,7 @@ jobs:
debian-12:
name: Debian 12 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1586,7 +1581,7 @@ jobs:
debian-12-arm64:
name: Debian 12 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1607,7 +1602,7 @@ jobs:
fedora-39:
name: Fedora 39 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'fedora-39') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1628,7 +1623,7 @@ jobs:
opensuse-15:
name: Opensuse 15 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'opensuse-15') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1649,7 +1644,7 @@ jobs:
photonos-4:
name: Photon OS 4 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1671,7 +1666,7 @@ jobs:
photonos-4-arm64:
name: Photon OS 4 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1693,7 +1688,7 @@ jobs:
photonos-5:
name: Photon OS 5 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1715,7 +1710,7 @@ jobs:
photonos-5-arm64:
name: Photon OS 5 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1737,7 +1732,7 @@ jobs:
ubuntu-2004:
name: Ubuntu 20.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1758,7 +1753,7 @@ jobs:
ubuntu-2004-arm64:
name: Ubuntu 20.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1779,7 +1774,7 @@ jobs:
ubuntu-2204:
name: Ubuntu 22.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1831,11 +1826,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -1985,11 +1980,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -2011,10 +2006,10 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- almalinux-8-pkg-tests
- almalinux-8-arm64-pkg-tests
- almalinux-9-pkg-tests
- almalinux-9-arm64-pkg-tests
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
- rockylinux-9-arm64-pkg-tests
- amazonlinux-2-pkg-tests
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
@ -2035,7 +2030,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -100,7 +100,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -117,7 +118,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -255,19 +256,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -287,10 +280,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@ -574,8 +563,8 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
almalinux-8-pkg-tests:
name: Alma Linux 8 Package Test
rockylinux-8-pkg-tests:
name: Rocky Linux 8 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -583,7 +572,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -595,8 +584,8 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-8-arm64-pkg-tests:
name: Alma Linux 8 Arm64 Package Test
rockylinux-8-arm64-pkg-tests:
name: Rocky Linux 8 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -604,7 +593,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -616,8 +605,8 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-pkg-tests:
name: Alma Linux 9 Package Test
rockylinux-9-pkg-tests:
name: Rocky Linux 9 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -625,7 +614,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -637,8 +626,8 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-arm64-pkg-tests:
name: Alma Linux 9 Arm64 Package Test
rockylinux-9-arm64-pkg-tests:
name: Rocky Linux 9 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -646,7 +635,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1050,6 +1039,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1071,6 +1061,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1082,7 +1073,7 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1091,7 +1082,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1301,6 +1293,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1322,6 +1315,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1334,7 +1328,7 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1342,7 +1336,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1355,15 +1350,15 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8:
name: Alma Linux 8 Test
rockylinux-8:
name: Rocky Linux 8 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1376,15 +1371,15 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8-arm64:
name: Alma Linux 8 Arm64 Test
rockylinux-8-arm64:
name: Rocky Linux 8 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1397,15 +1392,15 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9:
name: Alma Linux 9 Test
rockylinux-9:
name: Rocky Linux 9 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1418,15 +1413,15 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9-arm64:
name: Alma Linux 9 Arm64 Test
rockylinux-9-arm64:
name: Rocky Linux 9 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1896,11 +1891,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -2808,11 +2803,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -2893,10 +2888,10 @@ jobs:
- build-pkgs-src
- combine-all-code-coverage
- publish-repositories
- almalinux-8-pkg-tests
- almalinux-8-arm64-pkg-tests
- almalinux-9-pkg-tests
- almalinux-9-arm64-pkg-tests
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
- rockylinux-9-arm64-pkg-tests
- amazonlinux-2-pkg-tests
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
@ -2917,7 +2912,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -90,7 +90,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -107,7 +108,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -245,19 +246,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -277,10 +270,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@ -556,8 +545,8 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
almalinux-8-pkg-tests:
name: Alma Linux 8 Package Test
rockylinux-8-pkg-tests:
name: Rocky Linux 8 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -565,7 +554,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -577,8 +566,8 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-8-arm64-pkg-tests:
name: Alma Linux 8 Arm64 Package Test
rockylinux-8-arm64-pkg-tests:
name: Rocky Linux 8 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -586,7 +575,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -598,8 +587,8 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-pkg-tests:
name: Alma Linux 9 Package Test
rockylinux-9-pkg-tests:
name: Rocky Linux 9 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -607,7 +596,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -619,8 +608,8 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-arm64-pkg-tests:
name: Alma Linux 9 Arm64 Package Test
rockylinux-9-arm64-pkg-tests:
name: Rocky Linux 9 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -628,7 +617,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1032,6 +1021,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1053,6 +1043,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1064,7 +1055,7 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1073,7 +1064,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1283,6 +1275,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1304,6 +1297,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1316,7 +1310,7 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1324,7 +1318,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1337,15 +1332,15 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8:
name: Alma Linux 8 Test
rockylinux-8:
name: Rocky Linux 8 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1358,15 +1353,15 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8-arm64:
name: Alma Linux 8 Arm64 Test
rockylinux-8-arm64:
name: Rocky Linux 8 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1379,15 +1374,15 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9:
name: Alma Linux 9 Test
rockylinux-9:
name: Rocky Linux 9 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1400,15 +1395,15 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9-arm64:
name: Alma Linux 9 Arm64 Test
rockylinux-9-arm64:
name: Rocky Linux 9 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1878,11 +1873,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -2034,11 +2029,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -2060,10 +2055,10 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- almalinux-8-pkg-tests
- almalinux-8-arm64-pkg-tests
- almalinux-9-pkg-tests
- almalinux-9-arm64-pkg-tests
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
- rockylinux-9-arm64-pkg-tests
- amazonlinux-2-pkg-tests
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
@ -2084,7 +2079,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -73,7 +73,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -90,7 +91,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -237,19 +238,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -269,10 +262,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@ -556,8 +545,8 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
almalinux-8-pkg-tests:
name: Alma Linux 8 Package Test
rockylinux-8-pkg-tests:
name: Rocky Linux 8 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -565,7 +554,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -577,8 +566,8 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-8-arm64-pkg-tests:
name: Alma Linux 8 Arm64 Package Test
rockylinux-8-arm64-pkg-tests:
name: Rocky Linux 8 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -586,7 +575,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -598,8 +587,8 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-pkg-tests:
name: Alma Linux 9 Package Test
rockylinux-9-pkg-tests:
name: Rocky Linux 9 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -607,7 +596,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -619,8 +608,8 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
almalinux-9-arm64-pkg-tests:
name: Alma Linux 9 Arm64 Package Test
rockylinux-9-arm64-pkg-tests:
name: Rocky Linux 9 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -628,7 +617,7 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1032,6 +1021,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1053,6 +1043,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1064,7 +1055,7 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1073,7 +1064,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1283,6 +1275,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1304,6 +1297,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1316,7 +1310,7 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1324,7 +1318,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1337,15 +1332,15 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8:
name: Alma Linux 8 Test
rockylinux-8:
name: Rocky Linux 8 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8
distro-slug: rockylinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1358,15 +1353,15 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-8-arm64:
name: Alma Linux 8 Arm64 Test
rockylinux-8-arm64:
name: Rocky Linux 8 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-8-arm64
distro-slug: rockylinux-8-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -1379,15 +1374,15 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9:
name: Alma Linux 9 Test
rockylinux-9:
name: Rocky Linux 9 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9
distro-slug: rockylinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1400,15 +1395,15 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
almalinux-9-arm64:
name: Alma Linux 9 Arm64 Test
rockylinux-9-arm64:
name: Rocky Linux 9 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: almalinux-9-arm64
distro-slug: rockylinux-9-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
@ -2780,11 +2775,11 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- almalinux-8
- almalinux-8-arm64
- almalinux-9
- almalinux-9-arm64
- macos-13-arm64
- rockylinux-8
- rockylinux-8-arm64
- rockylinux-9
- rockylinux-9-arm64
- amazonlinux-2
- amazonlinux-2-arm64
- amazonlinux-2023
@ -2806,10 +2801,10 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- almalinux-8-pkg-tests
- almalinux-8-arm64-pkg-tests
- almalinux-9-pkg-tests
- almalinux-9-arm64-pkg-tests
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
- rockylinux-9-arm64-pkg-tests
- amazonlinux-2-pkg-tests
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
@ -2830,7 +2825,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -163,7 +163,7 @@ jobs:
macos-dependencies:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
timeout-minutes: 90
strategy:
fail-fast: false

View file

@ -17,7 +17,7 @@
relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>"
source: "<{ backend }>"
<%- if gh_environment %>
<%- if gh_environment != "ci" %>
environment: <{ gh_environment }>
sign-macos-packages: true
sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>

View file

@ -1,3 +1,5 @@
<%- set gh_environment = gh_environment|default("ci") %>
<%- extends 'layout.yml.jinja' %>
<%- set pre_commit_version = "3.0.4" %>

View file

@ -91,7 +91,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -108,7 +109,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -260,21 +261,13 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
}> ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -294,10 +287,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |

View file

@ -69,10 +69,10 @@ jobs:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["linux"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- for os in test_salt_pkg_downloads_listing["linux"] %>
- distro-slug: <{ os.slug }>
arch: <{ os.arch }>
pkg-type: <{ os.pkg_type }>
<%- endfor %>
steps:
@ -271,7 +271,7 @@ jobs:
macos:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
env:
USE_S3_CACHE: 'false'
environment: ${{ inputs.environment }}
@ -280,10 +280,10 @@ jobs:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["macos"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- for os in test_salt_pkg_downloads_listing["macos"] %>
- distro-slug: <{ os.slug }>
arch: <{ os.arch }>
pkg-type: <{ os.pkg_type }>
<%- endfor %>
steps:
@ -485,10 +485,10 @@ jobs:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["windows"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- for os in test_salt_pkg_downloads_listing["windows"] %>
- distro-slug: <{ os.slug }>
arch: <{ os.arch }>
pkg-type: <{ os.pkg_type }>
<%- endfor %>
steps:

View file

@ -1,28 +1,32 @@
<%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<%- for os in test_salt_pkg_listing["linux"] %>
<%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> Package Test
name: <{ os.display_name }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ arch }>
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ pkg_type }>
pkg-type: <{ os.pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- if fips == "fips" %>
<%- if os.fips %>
fips: true
<%- endif %>
@ -30,23 +34,28 @@
<%- for slug, display_name, arch in test_salt_pkg_listing["macos"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<%- for os in test_salt_pkg_listing["macos"] %>
<%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> Package Test
name: <{ os.display_name }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
runner: <{ os.runner }>
nox-session: ci-test-onedir
platform: macos
arch: <{ arch }>
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: macos
nox-version: <{ nox_version }>
@ -58,31 +67,33 @@
<%- endfor %>
<%- for slug, display_name, arch in test_salt_pkg_listing["windows"] %>
<%- for pkg_type in ("NSIS", "MSI") %>
<%- set job_name = "{}-{}-pkg-tests".format(slug.replace(".", ""), pkg_type.lower()) %>
<%- for os in test_salt_pkg_listing["windows"] %>
<%- set job_name = "{}-{}-pkg-tests".format(os.slug.replace(".", ""), os.pkg_type.lower()) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> <{ pkg_type }> Package Test
name: <{ os.display_name }> <{ os.pkg_type }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-windows.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: windows
arch: <{ arch }>
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ pkg_type }>
pkg-type: <{ os.pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- endfor %>
<%- endfor %>

View file

@ -6,18 +6,22 @@
<%- set partial_testrun_timeout_value = 360 %>
<%- set windows_full_testrun_timeout_value = full_testrun_timeout_value + 30 %>
<%- for slug, display_name, arch in test_salt_listing["windows"] %>
<%- for os in test_salt_listing["windows"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }> Test
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-windows.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: windows
arch: amd64
@ -33,21 +37,26 @@
<%- endfor %>
<%- for slug, display_name, arch in test_salt_listing["macos"] %>
<%- for os in test_salt_listing["macos"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }> Test
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
runner: <{ os.runner }>
nox-session: ci-test-onedir
platform: macos
arch: <{ arch }>
arch: <{ os.arch }>
nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
@ -59,21 +68,25 @@
<%- endfor %>
<%- for slug, display_name, arch, fips in test_salt_listing["linux"] %>
<%- for os in test_salt_listing["linux"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }> Test
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ arch }>
arch: <{ os.arch }>
nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
@ -82,7 +95,7 @@
skip-code-coverage: <{ skip_test_coverage_check }>
workflow-slug: <{ workflow_slug }>
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
<%- if fips == "fips" %>
<%- if os.fips %>
fips: true
<%- endif %>

View file

@ -8,6 +8,10 @@ on:
required: true
type: string
description: The OS slug to run tests against
runner:
required: true
type: string
description: The GitHub runner name
nox-session:
required: true
type: string
@ -97,7 +101,7 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
runs-on: ${{ inputs.runner }}
timeout-minutes: ${{ inputs.timeout-minutes }}
needs:
- generate-matrix

View file

@ -69,24 +69,6 @@ jobs:
fail-fast: false
matrix:
include:
- distro-slug: almalinux-8
arch: x86_64
pkg-type: package
- distro-slug: almalinux-8-arm64
arch: aarch64
pkg-type: package
- distro-slug: almalinux-8-arm64
arch: arm64
pkg-type: package
- distro-slug: almalinux-9
arch: x86_64
pkg-type: package
- distro-slug: almalinux-9-arm64
arch: aarch64
pkg-type: package
- distro-slug: almalinux-9-arm64
arch: arm64
pkg-type: package
- distro-slug: amazonlinux-2
arch: x86_64
pkg-type: package
@ -159,6 +141,24 @@ jobs:
- distro-slug: photonos-5-arm64
arch: arm64
pkg-type: package
- distro-slug: rockylinux-8
arch: x86_64
pkg-type: package
- distro-slug: rockylinux-8-arm64
arch: aarch64
pkg-type: package
- distro-slug: rockylinux-8-arm64
arch: arm64
pkg-type: package
- distro-slug: rockylinux-9
arch: x86_64
pkg-type: package
- distro-slug: rockylinux-9-arm64
arch: aarch64
pkg-type: package
- distro-slug: rockylinux-9-arm64
arch: arm64
pkg-type: package
- distro-slug: ubuntu-20.04
arch: x86_64
pkg-type: package
@ -380,7 +380,7 @@ jobs:
macos:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
env:
USE_S3_CACHE: 'false'
environment: ${{ inputs.environment }}
@ -395,10 +395,10 @@ jobs:
- distro-slug: macos-13
arch: x86_64
pkg-type: package
- distro-slug: macos-13-xlarge
- distro-slug: macos-13-arm64
arch: arm64
pkg-type: package
- distro-slug: macos-13-xlarge
- distro-slug: macos-13-arm64
arch: arm64
pkg-type: onedir

View file

@ -7,6 +7,10 @@ on:
required: true
type: string
description: The OS slug to run tests against
runner:
required: true
type: string
description: The GitHub runner name
platform:
required: true
type: string
@ -98,7 +102,7 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
runs-on: ${{ inputs.runner }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- generate-matrix

View file

@ -59,7 +59,7 @@ repos:
- id: tools
alias: generate-workflows
name: Generate GitHub Workflow Templates
files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/templates/.*)$
files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/.*)$
pass_filenames: false
args:
- pre-commit

2
changelog/53363.fixed.md Normal file
View file

@ -0,0 +1,2 @@
``user.add`` on Windows now allows you to add user names that contain all
numeric characters

1
changelog/61807.fixed.md Normal file
View file

@ -0,0 +1 @@
pkg.refresh_db on Windows now honors saltenv

1
changelog/63667.fixed.md Normal file
View file

@ -0,0 +1 @@
Fix user and group management on Windows to handle the Everyone group

2
changelog/63848.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Fixes an issue in pkg.refresh_db on Windows where new package definition
files were not being picked up on the first run

1
changelog/64933.fixed.md Normal file
View file

@ -0,0 +1 @@
Display a proper error when pki commands fail in the win_pki module

1
changelog/65200.fixed.md Normal file
View file

@ -0,0 +1 @@
Prevent full system upgrade on single package install for Arch Linux

7
changelog/65611.fixed.md Normal file
View file

@ -0,0 +1,7 @@
When using s3fs, files deleted from the bucket were not deleted in the
master or minion local cache, which could lead to unexpected file copies or
even unexpected state applications. This change makes the local cache
consistent with the remote bucket by deleting files locally that are deleted
from the bucket.
**NOTE** this could lead to **breakage** on affected systems if they were
inadvertently depending on previously deleted files.

2
changelog/66049.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Fixed an issue with the ``file.directory`` state where paths would be modified in test
mode if ``backupname`` is used.

1
changelog/66127.fixed.md Normal file
View file

@ -0,0 +1 @@
Fix content type backwards compatibility with HTTP proxy POST requests in the http utils module.

1
changelog/66143.fixed.md Normal file
View file

@ -0,0 +1 @@
Call systemctl with "try-restart" instead of the invalid "retry-restart" within the RPM spec, properly restarting upgraded services

1
changelog/66280.fixed.md Normal file
View file

@ -0,0 +1 @@
Add leading slash to salt helper file paths as per dh_links requirement

1
changelog/66284.fixed.md Normal file
View file

@ -0,0 +1 @@
Fixed x509.certificate_managed - ca_server did not return a certificate

1
changelog/66705.fixed.md Normal file
View file

@ -0,0 +1 @@
Backport the fix from #66164 to fix #65703. Use OrderedDict to fix bad indexing.

View file

@ -1,48 +1,8 @@
{
"almalinux-8-arm64": {
"ami": "ami-045a65c1d4ceb04a5",
"ami_description": "CI Image of AlmaLinux 8 arm64",
"ami_name": "salt-project/ci/almalinux/8/arm64/20240209.1843",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"almalinux-8": {
"ami": "ami-0b9c2b7ba679e691d",
"ami_description": "CI Image of AlmaLinux 8 x86_64",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20240209.1843",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"almalinux-9-arm64": {
"ami": "ami-0cbdf762adc955d47",
"ami_description": "CI Image of AlmaLinux 9 arm64",
"ami_name": "salt-project/ci/almalinux/9/arm64/20240209.1844",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"almalinux-9": {
"ami": "ami-086c8ef0ef6951a8f",
"ami_description": "CI Image of AlmaLinux 9 x86_64",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20240209.1843",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"amazonlinux-2-arm64": {
"ami": "ami-07bc422e281c67f10",
"ami": "ami-0fa1d515b17aa5832",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20240209.1843",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20240325.2133",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -50,9 +10,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-01ecdeb9a8251824e",
"ami": "ami-0c9a41917d788911e",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240209.1843",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -60,9 +20,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023-arm64": {
"ami": "ami-0b8a0efa9ea7ebfa4",
"ami": "ami-00644e6cc81cb8fc0",
"ami_description": "CI Image of AmazonLinux 2023 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240209.1844",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240325.2133",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -70,9 +30,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023": {
"ami": "ami-0f013d0d9dbaf3b06",
"ami": "ami-01ba1cac2a9ba4845",
"ami_description": "CI Image of AmazonLinux 2023 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240209.1844",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -90,9 +50,9 @@
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-09c1e87fa7a2be337",
"ami": "ami-0a0c4ce5d61416643",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20240209.1843",
"ami_name": "salt-project/ci/centos/7/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -100,9 +60,9 @@
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-010faf67fdabfbcdf",
"ami": "ami-06fec7a8fe157fe7d",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20240209.1843",
"ami_name": "salt-project/ci/centos/7/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -110,9 +70,9 @@
"ssh_username": "centos"
},
"debian-10-arm64": {
"ami": "ami-018ff5d81815e307f",
"ami": "ami-0a4d0583945dba7a7",
"ami_description": "CI Image of Debian 10 arm64",
"ami_name": "salt-project/ci/debian/10/arm64/20240209.1843",
"ami_name": "salt-project/ci/debian/10/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -120,9 +80,9 @@
"ssh_username": "admin"
},
"debian-10": {
"ami": "ami-033f768666c97d386",
"ami": "ami-0f474f6f3b4f1a981",
"ami_description": "CI Image of Debian 10 x86_64",
"ami_name": "salt-project/ci/debian/10/x86_64/20240209.1843",
"ami_name": "salt-project/ci/debian/10/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -130,9 +90,9 @@
"ssh_username": "admin"
},
"debian-11-arm64": {
"ami": "ami-02dfec8b374ad8fc8",
"ami": "ami-0e1d6f34aaeba1e58",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20240209.1843",
"ami_name": "salt-project/ci/debian/11/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -140,9 +100,9 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-06e4f77ed230e2def",
"ami": "ami-012327dae48ce80ac",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20240209.1843",
"ami_name": "salt-project/ci/debian/11/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -150,9 +110,9 @@
"ssh_username": "admin"
},
"debian-12-arm64": {
"ami": "ami-05c7771a9ec62dfb2",
"ami": "ami-0527ef47cece68f54",
"ami_description": "CI Image of Debian 12 arm64",
"ami_name": "salt-project/ci/debian/12/arm64/20240209.1843",
"ami_name": "salt-project/ci/debian/12/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -160,9 +120,9 @@
"ssh_username": "admin"
},
"debian-12": {
"ami": "ami-0ac4b96d4ae5faa23",
"ami": "ami-0d9d685ae10656958",
"ami_description": "CI Image of Debian 12 x86_64",
"ami_name": "salt-project/ci/debian/12/x86_64/20240209.1843",
"ami_name": "salt-project/ci/debian/12/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -170,9 +130,9 @@
"ssh_username": "admin"
},
"fedora-39-arm64": {
"ami": "ami-04f69299edce3ff91",
"ami": "ami-00d2f2e1fccac457d",
"ami_description": "CI Image of Fedora 39 arm64",
"ami_name": "salt-project/ci/fedora/39/arm64/20240209.1844",
"ami_name": "salt-project/ci/fedora/39/arm64/20240325.2133",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -180,9 +140,9 @@
"ssh_username": "fedora"
},
"fedora-39": {
"ami": "ami-033bf14cad5d795a2",
"ami": "ami-072c01a40a6519153",
"ami_description": "CI Image of Fedora 39 x86_64",
"ami_name": "salt-project/ci/fedora/39/x86_64/20240209.1844",
"ami_name": "salt-project/ci/fedora/39/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -190,9 +150,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-023efb1898809e8fe",
"ami": "ami-04cfbfd3c214348bc",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20240209.1844",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -200,9 +160,9 @@
"ssh_username": "ec2-user"
},
"photonos-4-arm64": {
"ami": "ami-0d164263b5095dc45",
"ami": "ami-0bd76e6234ee685a7",
"ami_description": "CI Image of PhotonOS 4 arm64",
"ami_name": "salt-project/ci/photonos/4/arm64/20240209.1844",
"ami_name": "salt-project/ci/photonos/4/arm64/20240325.2133",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -210,9 +170,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-0454d6f4e80b94412",
"ami": "ami-0b1947785de4b2a6e",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20240209.1844",
"ami_name": "salt-project/ci/photonos/4/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -220,9 +180,9 @@
"ssh_username": "root"
},
"photonos-5-arm64": {
"ami": "ami-094f4c1e098fc2192",
"ami": "ami-0d02f34b9820752e4",
"ami_description": "CI Image of PhotonOS 5 arm64",
"ami_name": "salt-project/ci/photonos/5/arm64/20240209.1844",
"ami_name": "salt-project/ci/photonos/5/arm64/20240325.2133",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -230,19 +190,59 @@
"ssh_username": "root"
},
"photonos-5": {
"ami": "ami-037344b8f3d9e2650",
"ami": "ami-0fd58f07139e9622e",
"ami_description": "CI Image of PhotonOS 5 x86_64",
"ami_name": "salt-project/ci/photonos/5/x86_64/20240209.1844",
"ami_name": "salt-project/ci/photonos/5/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "root"
},
"rockylinux-8-arm64": {
"ami": "ami-0e5d23f57141e5ac4",
"ami_description": "CI Image of RockyLinux 8 arm64",
"ami_name": "salt-project/ci/rockylinux/8/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "rocky"
},
"rockylinux-8": {
"ami": "ami-0e2cac6a847d700aa",
"ami_description": "CI Image of RockyLinux 8 x86_64",
"ami_name": "salt-project/ci/rockylinux/8/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "rocky"
},
"rockylinux-9-arm64": {
"ami": "ami-0054d3b25a08d2b41",
"ami_description": "CI Image of RockyLinux 9 arm64",
"ami_name": "salt-project/ci/rockylinux/9/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "rocky"
},
"rockylinux-9": {
"ami": "ami-042d3b81138968bdb",
"ami_description": "CI Image of RockyLinux 9 x86_64",
"ami_name": "salt-project/ci/rockylinux/9/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "rocky"
},
"ubuntu-20.04-arm64": {
"ami": "ami-0eb0f403664076b82",
"ami": "ami-0bd2e3ee99c5a2f52",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240209.1843",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -250,9 +250,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-07ad57960d22a8b65",
"ami": "ami-0fdc19cb94bc96db3",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240209.1843",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -260,9 +260,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-05197331792cbc895",
"ami": "ami-0690e86bc116a6245",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240209.1843",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -270,9 +270,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-01d9f296a69eaec3c",
"ami": "ami-0285c21e3abc8b2b2",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240209.1843",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -280,9 +280,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04-arm64": {
"ami": "ami-0511f6146f198b645",
"ami": "ami-09e0eb04bbf2a2f35",
"ami_description": "CI Image of Ubuntu 23.04 arm64",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20240209.1843",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20240325.2134",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -290,9 +290,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04": {
"ami": "ami-0f09467d281f1a312",
"ami": "ami-029edca569b26d625",
"ami_description": "CI Image of Ubuntu 23.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20240209.1843",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20240325.2134",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -300,9 +300,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-015acc248b175fb3c",
"ami": "ami-0474d8e7e13c81883",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20240209.1844",
"ami_name": "salt-project/ci/windows/2016/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -310,9 +310,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-0318f14f8690bf17f",
"ami": "ami-07afee87d071123bf",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20240209.1844",
"ami_name": "salt-project/ci/windows/2019/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -320,9 +320,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-0a78a28f614a662ad",
"ami": "ami-08f69046343f92cc4",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20240209.1844",
"ami_name": "salt-project/ci/windows/2022/x86_64/20240325.2133",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",

View file

@ -4,3 +4,10 @@ relenv_version: "0.15.1"
release_branches:
- "3006.x"
- "3007.x"
mandatory_os_slugs:
- rockylinux-9
- amazonlinux-2023-arm64
- archlinux-lts
- macos-13-arm64
- ubuntu-22.04-arm64
- windows-2022

View file

@ -397,16 +397,17 @@ winrepo_source_dir
:conf_minion:`winrepo_source_dir` (str)
The location of the .sls files on the Salt file server. This allows for using
different environments. Default is ``salt://win/repo-ng/``\.
The location of the .sls files on the Salt file server. Default is
``salt://win/repo-ng/``.
.. warning::
If the default for ``winrepo_dir_ng`` is changed, this setting may need to
be changed on each minion. The default setting for ``winrepo_dir_ng`` is
``/srv/salt/win/repo-ng``\. If that were changed to
``/srv/salt/new/repo-ng``\, then the ``winrepo_source_dir`` would need to be
If the default for ``winrepo_dir_ng`` is changed, then this setting will
also need to be changed on each minion. The default setting for
``winrepo_dir_ng`` is ``/srv/salt/win/repo-ng``. If that were changed to
``/srv/salt/new/repo-ng`` then the ``winrepo_source_dir`` would need to be
changed to ``salt://new/repo-ng``
.. _masterless-minion-config:
Masterless Minion Configuration
@ -430,7 +431,7 @@ winrepo_dir
This setting is maintained for backwards compatibility with legacy minions. It
points to the location in the ``file_roots`` where the winrepo files are kept.
The default is: ``C:\salt\srv\salt\win\repo``
The default is: ``C:\ProgramData\Salt Project\Salt\srv\salt\win\repo``
winrepo_dir_ng
--------------
@ -438,7 +439,7 @@ winrepo_dir_ng
:conf_minion:`winrepo_dir_ng` (str)
The location in the ``file_roots`` where the winrepo files are kept. The default
is ``C:\salt\srv\salt\win\repo-ng``\.
is ``C:\ProgramData\Salt Project\Salt\srv\salt\win\repo-ng``.
.. warning::
You can change the location of the winrepo directory. However, it must
@ -483,6 +484,137 @@ default is a list containing a single URL:
.. _usage:
Sample Configurations
*********************
Masterless
==========
The configs in this section are for working with winrepo on a Windows minion
using ``salt-call --local``.
Default Configuration
---------------------
This is the default configuration if nothing is configured in the minion config.
The config is shown here for clarity. These are the defaults:
.. code-block:: yaml
file_roots:
base:
- C:\ProgramData\Salt Project\Salt\srv\salt
winrepo_source_dir: 'salt://win/repo-ng'
winrepo_dir_ng: C:\ProgramData\Salt Project\Salt\srv\salt\win\repo-ng
The :mod:`winrepo.update_git_repos <salt.modules.winrepo.update_git_repos>`
command will clone the repository to ``win\repo-ng`` on the file_roots.
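With the defaults above, the clone and a follow-up package database refresh
can be run directly on the minion. A minimal sketch, assuming a standard
masterless Windows minion:

.. code-block:: bash

    # Clone the winrepo Git repositories into the local file_roots
    salt-call --local winrepo.update_git_repos

    # Rebuild the package definition database from the cloned files
    salt-call --local pkg.refresh_db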
Multiple Salt Environments
--------------------------
This starts to get a little tricky. The winrepo repository doesn't
get cloned to each environment when you run
:mod:`winrepo.update_git_repos <salt.runners.winrepo.update_git_repos>`, so to
make this work, all environments share the same winrepo. Applying states using
the ``saltenv`` option will find the state files in the appropriate environment,
but the package definition files will always be pulled from the same location.
Therefore, you have to put the same winrepo location in each saltenv. Here's how
this would look:
.. code-block:: yaml
file_roots:
base:
- C:\ProgramData\Salt Project\Salt\srv\salt\base
- C:\ProgramData\Salt Project\Salt\srv\salt\winrepo
test:
- C:\ProgramData\Salt Project\Salt\srv\salt\test
- C:\ProgramData\Salt Project\Salt\srv\salt\winrepo
winrepo_source_dir: 'salt://salt-winrepo-ng'
winrepo_dir_ng: C:\ProgramData\Salt Project\Salt\srv\salt\winrepo
winrepo_dir: C:\ProgramData\Salt Project\Salt\srv\salt\winrepo
When you run
:mod:`winrepo.update_git_repos <salt.runners.winrepo.update_git_repos>`, the
Git repository will be cloned to the location specified in the
``winrepo_dir_ng`` setting. The ``winrepo_dir`` setting points to the same
location so that everything gets cloned to the same place. The directory that
gets cloned is named ``salt-winrepo-ng``, so that is the name to use in the
``winrepo_source_dir`` setting.
The ``winrepo`` directory should only contain the package definition files. You
wouldn't want to place any states in the ``winrepo`` directory as they will be
available to both environments.
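For instance, a state can be applied from the ``test`` environment while the
package definitions still come from the shared winrepo. A minimal sketch,
where ``firefox`` is only a placeholder state name:

.. code-block:: bash

    # State files are resolved from the 'test' saltenv; package definition
    # files are still pulled from the shared winrepo location
    salt-call --local state.apply firefox saltenv=test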
Master
======
When working in a Master/Minion environment you have to split up some of the
config settings between the master and the minion. Here are some sample configs
for winrepo in a Master/Minion environment.
Default Configuration
---------------------
This is the default configuration if nothing is configured. The config is shown
here for clarity. These are the defaults on the master:
.. code-block:: yaml
file_roots:
base:
- /srv/salt
winrepo_dir_ng: /srv/salt/win/repo-ng
This is the default in the minion config:
.. code-block:: yaml
winrepo_source_dir: 'salt://win/repo-ng'
The :mod:`winrepo.update_git_repos <salt.runners.winrepo.update_git_repos>`
command will clone the repository to ``win\repo-ng`` on the file_roots.
Multiple Salt Environments
--------------------------
To set up multiple saltenvs using a Master/Minion configuration, set the
following in the master config:
.. code-block:: yaml
file_roots:
base:
- /srv/salt/base
- /srv/salt/winrepo
test:
- /srv/salt/test
- /srv/salt/winrepo
winrepo_dir_ng: /srv/salt/winrepo
winrepo_dir: /srv/salt/winrepo
Use the winrepo runner to set up the winrepo repository on the master.
.. code-block:: bash
salt-run winrepo.update_git_repos
The winrepo will be cloned to ``/srv/salt/winrepo`` under a directory named
``salt-winrepo-ng``.
Set the following in the minion config so the minion knows where to find the
package definition files in the file_roots:
.. code-block:: yaml
winrepo_source_dir: 'salt://salt-winrepo-ng'
The same stipulations apply in a Master/Minion configuration as they do in a
Masterless configuration.
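Putting it together, a minimal sketch of the master-side workflow (the grain
target below is only an example):

.. code-block:: bash

    # On the master: clone/update the winrepo Git repositories
    salt-run winrepo.update_git_repos

    # Have the Windows minions rebuild their package definition databases
    salt -G 'os:Windows' pkg.refresh_db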
Usage
*****

View file

@ -1,6 +1,6 @@
opt/saltstack/salt/salt-master /usr/bin/salt-master
opt/saltstack/salt/salt usr/bin/salt
opt/saltstack/salt/salt-cp usr/bin/salt-cp
opt/saltstack/salt/salt-key usr/bin/salt-key
opt/saltstack/salt/salt-run usr/bin/salt-run
opt/saltstack/salt/spm usr/bin/spm
opt/saltstack/salt/salt /usr/bin/salt
opt/saltstack/salt/salt-cp /usr/bin/salt-cp
opt/saltstack/salt/salt-key /usr/bin/salt-key
opt/saltstack/salt/salt-run /usr/bin/salt-run
opt/saltstack/salt/spm /usr/bin/spm

View file

@ -439,16 +439,16 @@ find /etc/salt /opt/saltstack/salt /var/log/salt /var/cache/salt /var/run/salt \
# %%systemd_preun salt-syndic.service > /dev/null 2>&1
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || :
systemctl stop salt-syndic.service > /dev/null 2>&1 || :
/bin/systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || :
fi
%preun minion
# %%systemd_preun salt-minion.service
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || :
systemctl stop salt-minion.service > /dev/null 2>&1 || :
/bin/systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-minion.service > /dev/null 2>&1 || :
fi
@ -456,8 +456,8 @@ fi
# %%systemd_preun salt-api.service
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
systemctl --no-reload disable salt-api.service > /dev/null 2>&1 || :
systemctl stop salt-api.service > /dev/null 2>&1 || :
/bin/systemctl --no-reload disable salt-api.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-api.service > /dev/null 2>&1 || :
fi
@ -472,14 +472,6 @@ ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud
%post master
# %%systemd_post salt-master.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-master.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-master.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt
ln -s -f /opt/saltstack/salt/salt-cp %{_bindir}/salt-cp
ln -s -f /opt/saltstack/salt/salt-key %{_bindir}/salt-key
@ -498,27 +490,27 @@ if [ $1 -lt 2 ]; then
fi
fi
fi
# %%systemd_post salt-master.service
if [ $1 -gt 1 ] ; then
# Upgrade
/bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || :
else
# Initial installation
/bin/systemctl preset salt-master.service >/dev/null 2>&1 || :
fi
%post syndic
ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic
# %%systemd_post salt-syndic.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-syndic.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-syndic.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-syndic.service >/dev/null 2>&1 || :
/bin/systemctl preset salt-syndic.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic
%post minion
# %%systemd_post salt-minion.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-minion.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-minion.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt-minion %{_bindir}/salt-minion
ln -s -f /opt/saltstack/salt/salt-call %{_bindir}/salt-call
ln -s -f /opt/saltstack/salt/salt-proxy %{_bindir}/salt-proxy
@ -535,20 +527,28 @@ if [ $1 -lt 2 ]; then
fi
fi
fi
# %%systemd_post salt-minion.service
if [ $1 -gt 1 ] ; then
# Upgrade
/bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || :
else
# Initial installation
/bin/systemctl preset salt-minion.service >/dev/null 2>&1 || :
fi
%post ssh
ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh
%post api
ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api
# %%systemd_post salt-api.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-api.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-api.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-api.service >/dev/null 2>&1 || :
/bin/systemctl preset salt-api.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api
%posttrans cloud
@ -589,10 +589,10 @@ fi
%postun master
# %%systemd_postun_with_restart salt-master.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-master.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || :
fi
if [ $1 -eq 0 ]; then
if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then
@ -610,18 +610,18 @@ fi
%postun syndic
# %%systemd_postun_with_restart salt-syndic.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-syndic.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-syndic.service >/dev/null 2>&1 || :
fi
%postun minion
# %%systemd_postun_with_restart salt-minion.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-minion.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || :
fi
if [ $1 -eq 0 ]; then
if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then
@ -639,10 +639,10 @@ fi
%postun api
# %%systemd_postun_with_restart salt-api.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-api.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-api.service >/dev/null 2>&1 || :
fi
%changelog

View file

@ -103,7 +103,6 @@ if ( $install_build_tools ) {
"--add Microsoft.VisualStudio.Component.Windows81SDK", `
"--add Microsoft.VisualStudio.Component.Windows10SDK.17763", `
"--add Microsoft.VisualStudio.Component.VC.140", `
"--add Microsoft.Component.VC.Runtime.UCRTSDK", `
"--lang en-US", `
"--includeRecommended", `
"--quiet", `

View file

@ -2,10 +2,10 @@ mock >= 3.0.0
# PyTest
docker
pytest >= 7.2.0
pytest-salt-factories >= 1.0.0rc29
pytest-salt-factories >= 1.0.0
pytest-helpers-namespace >= 2019.1.8
pytest-subtests
pytest-timeout
pytest-timeout >= 2.3.1
pytest-httpserver
pytest-custom-exit-code >= 0.3
flaky

View file

@ -45,6 +45,7 @@ vcert; sys_platform != 'win32'
virtualenv>=20.3.0
watchdog>=0.9.0
xmldiff>=2.4
textfsm
# Available template libraries that can be used
genshi>=0.7.3
cheetah3>=3.2.2

View file

@ -126,14 +126,14 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -299,7 +299,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -362,7 +362,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -375,9 +375,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -477,7 +477,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -489,6 +489,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -101,7 +101,7 @@ pydantic==1.10.8
# inflect
pyenchant==3.2.2
# via sphinxcontrib-spelling
pygments==2.15.1
pygments==2.17.2
# via sphinx
pytz==2024.1
# via

View file

@ -125,14 +125,14 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -303,7 +303,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -366,7 +366,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -379,9 +379,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -482,7 +482,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -494,6 +494,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -141,14 +141,14 @@ exceptiongroup==1.1.1
# pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -329,7 +329,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -400,7 +400,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -413,9 +413,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -539,7 +539,7 @@ slack-bolt==1.18.0
# via -r requirements/static/ci/linux.in
slack-sdk==3.21.3
# via slack-bolt
smmap==5.0.0
smmap==5.0.1
# via gitdb
sniffio==1.3.0
# via
@ -556,6 +556,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -40,7 +40,7 @@ pydantic-core==2.16.2
# via pydantic
pydantic==2.6.1
# via python-tools-scripts
pygments==2.13.0
pygments==2.17.2
# via rich
python-dateutil==2.8.2
# via botocore

View file

@ -124,13 +124,15 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# aiohttp
# aiosignal
future==1.0.0
# via textfsm
genshi==0.7.7
# via -r requirements/static/ci/common.in
geomet==0.2.1.post1
@ -261,7 +263,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -324,7 +326,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -337,9 +339,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -439,6 +441,7 @@ six==1.15.0
# python-dateutil
# pyvmomi
# pywinrm
# textfsm
# websocket-client
smmap==5.0.1
# via
@ -452,6 +455,8 @@ tempora==5.3.0
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# portend
textfsm==1.1.3
# via -r requirements/static/ci/common.in
timelib==0.3.0
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt

View file

@ -123,14 +123,14 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -296,7 +296,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -376,9 +376,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -478,7 +478,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -490,6 +490,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -109,7 +109,7 @@ pydantic==2.5.2
# inflect
pyenchant==3.2.2
# via sphinxcontrib-spelling
pygments==2.15.1
pygments==2.17.2
# via sphinx
pytz==2024.1
# via

View file

@ -122,14 +122,14 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -300,7 +300,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -367,7 +367,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -380,9 +380,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -484,7 +484,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -496,6 +496,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -136,14 +136,14 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -324,7 +324,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -399,7 +399,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -412,9 +412,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -539,7 +539,7 @@ slack-bolt==1.18.0
# via -r requirements/static/ci/linux.in
slack-sdk==3.21.3
# via slack-bolt
smmap==5.0.0
smmap==5.0.1
# via gitdb
sniffio==1.3.0
# via
@ -556,6 +556,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -40,7 +40,7 @@ pydantic-core==2.16.2
# via pydantic
pydantic==2.6.1
# via python-tools-scripts
pygments==2.15.1
pygments==2.17.2
# via rich
python-dateutil==2.8.2
# via botocore

View file

@ -121,13 +121,15 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# aiohttp
# aiosignal
future==1.0.0
# via textfsm
genshi==0.7.7
# via -r requirements/static/ci/common.in
geomet==0.2.1.post1
@ -258,7 +260,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -325,7 +327,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -338,9 +340,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -440,6 +442,7 @@ six==1.15.0
# python-dateutil
# pyvmomi
# pywinrm
# textfsm
# websocket-client
smmap==5.0.1
# via
@ -453,6 +456,8 @@ tempora==5.3.0
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# portend
textfsm==1.1.3
# via -r requirements/static/ci/common.in
timelib==0.3.0
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt

View file

@ -170,7 +170,7 @@ filelock==3.13.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# virtualenv
flaky==3.7.0
flaky==3.8.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/pytest.txt
@ -180,7 +180,7 @@ frozenlist==1.4.1
# -c requirements/static/ci/py3.12/linux.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# -c requirements/static/ci/py3.12/linux.txt
# napalm
@ -416,7 +416,7 @@ platformdirs==4.0.0
# via
# -c requirements/static/ci/py3.12/linux.txt
# virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via
# -c requirements/static/ci/py3.12/linux.txt
# pytest
@ -521,7 +521,7 @@ pytest-httpserver==1.0.8
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/pytest.txt
@ -543,11 +543,11 @@ pytest-system-statistics==1.0.2
# via
# -c requirements/static/ci/py3.12/linux.txt
# pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/pytest.txt
@ -690,7 +690,7 @@ smbprotocol==1.10.1
# via
# -r requirements/static/ci/cloud.in
# pypsexec
smmap==5.0.0
smmap==5.0.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# gitdb
@ -710,6 +710,7 @@ tempora==5.3.0
textfsm==1.1.3
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -123,14 +123,14 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -296,7 +296,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -376,9 +376,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -478,7 +478,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -490,6 +490,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -185,7 +185,7 @@ pydantic==2.5.2
# inflect
pyenchant==3.2.2
# via sphinxcontrib-spelling
pygments==2.15.1
pygments==2.17.2
# via sphinx
pyopenssl==24.0.0
# via

View file

@ -122,14 +122,14 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -300,7 +300,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -367,7 +367,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -380,9 +380,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -484,7 +484,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -496,6 +496,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -192,7 +192,7 @@ frozenlist==1.4.1
# -c requirements/static/ci/py3.12/linux.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# -c requirements/static/ci/py3.12/linux.txt
# napalm
@ -691,7 +691,7 @@ slack-sdk==3.21.3
# via
# -c requirements/static/ci/py3.12/linux.txt
# slack-bolt
smmap==5.0.0
smmap==5.0.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# gitdb
@ -717,6 +717,7 @@ tempora==5.3.0
textfsm==1.1.3
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -136,14 +136,14 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# aiohttp
# aiosignal
future==0.18.3
future==1.0.0
# via
# napalm
# textfsm
@ -324,7 +324,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -399,7 +399,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -412,9 +412,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==1.4.2
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -539,7 +539,7 @@ slack-bolt==1.18.0
# via -r requirements/static/ci/linux.in
slack-sdk==3.21.3
# via slack-bolt
smmap==5.0.0
smmap==5.0.1
# via gitdb
sniffio==1.3.0
# via
@ -556,6 +556,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -40,7 +40,7 @@ pydantic-core==2.16.2
# via pydantic
pydantic==2.6.1
# via python-tools-scripts
pygments==2.15.1
pygments==2.17.2
# via rich
python-dateutil==2.8.2
# via botocore

View file

@ -121,13 +121,15 @@ etcd3-py==0.1.6
# via -r requirements/static/ci/common.in
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt
# aiohttp
# aiosignal
future==1.0.0
# via textfsm
genshi==0.7.7
# via -r requirements/static/ci/common.in
geomet==0.2.1.post1
@ -258,7 +260,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -325,7 +327,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -338,9 +340,9 @@ pytest-subtests==0.4.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -440,6 +442,7 @@ six==1.15.0
# python-dateutil
# pyvmomi
# pywinrm
# textfsm
# websocket-client
smmap==5.0.1
# via
@ -453,6 +456,8 @@ tempora==5.3.0
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt
# portend
textfsm==1.1.3
# via -r requirements/static/ci/common.in
timelib==0.3.0
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt

View file

@ -109,7 +109,7 @@ pydantic==1.10.8
# inflect
pyenchant==3.2.2
# via sphinxcontrib-spelling
pygments==2.15.1
pygments==2.17.2
# via sphinx
pytz==2024.1
# via

View file

@ -125,7 +125,7 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
@ -307,7 +307,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -370,7 +370,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -383,9 +383,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -486,7 +486,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -498,6 +498,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -136,7 +136,7 @@ exceptiongroup==1.1.1
# pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
@ -326,7 +326,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -397,7 +397,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -410,9 +410,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -533,7 +533,7 @@ slack-bolt==1.18.0
# via -r requirements/static/ci/linux.in
slack-sdk==3.21.3
# via slack-bolt
smmap==5.0.0
smmap==5.0.1
# via gitdb
sniffio==1.3.0
# via
@ -550,6 +550,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -124,13 +124,15 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.8/windows.txt
# aiohttp
# aiosignal
future==1.0.0
# via textfsm
genshi==0.7.7
# via -r requirements/static/ci/common.in
geomet==0.2.1.post1
@ -265,7 +267,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -328,7 +330,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -341,9 +343,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -444,6 +446,7 @@ six==1.15.0
# python-dateutil
# pyvmomi
# pywinrm
# textfsm
# websocket-client
smmap==5.0.1
# via
@ -457,6 +460,8 @@ tempora==5.3.0
# via
# -c requirements/static/ci/../pkg/py3.8/windows.txt
# portend
textfsm==1.1.3
# via -r requirements/static/ci/common.in
timelib==0.3.0
# via
# -c requirements/static/ci/../pkg/py3.8/windows.txt

View file

@ -126,7 +126,7 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
@ -299,7 +299,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -362,7 +362,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -375,9 +375,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -477,7 +477,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -489,6 +489,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -105,7 +105,7 @@ pydantic==1.10.8
# inflect
pyenchant==3.2.2
# via sphinxcontrib-spelling
pygments==2.15.1
pygments==2.17.2
# via sphinx
pytz==2024.1
# via

View file

@ -125,7 +125,7 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
@ -303,7 +303,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -366,7 +366,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -379,9 +379,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -482,7 +482,7 @@ six==1.16.0
# transitions
# vcert
# websocket-client
smmap==5.0.0
smmap==5.0.1
# via gitdb
sqlparse==0.4.4
# via -r requirements/static/ci/common.in
@ -494,6 +494,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -136,7 +136,7 @@ exceptiongroup==1.1.1
# pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
@ -322,7 +322,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -393,7 +393,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -406,9 +406,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -529,7 +529,7 @@ slack-bolt==1.18.0
# via -r requirements/static/ci/linux.in
slack-sdk==3.21.3
# via slack-bolt
smmap==5.0.0
smmap==5.0.1
# via gitdb
sniffio==1.3.0
# via
@ -546,6 +546,7 @@ tempora==5.3.0
# portend
textfsm==1.1.3
# via
# -r requirements/static/ci/common.in
# napalm
# netmiko
# ntc-templates

View file

@ -40,7 +40,7 @@ pydantic-core==2.16.2
# via pydantic
pydantic==2.6.1
# via python-tools-scripts
pygments==2.13.0
pygments==2.17.2
# via rich
python-dateutil==2.8.2
# via botocore

View file

@ -124,13 +124,15 @@ exceptiongroup==1.1.1
# via pytest
filelock==3.13.1
# via virtualenv
flaky==3.7.0
flaky==3.8.1
# via -r requirements/pytest.txt
frozenlist==1.4.1
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# aiohttp
# aiosignal
future==1.0.0
# via textfsm
genshi==0.7.7
# via -r requirements/static/ci/common.in
geomet==0.2.1.post1
@ -261,7 +263,7 @@ pathspec==0.11.1
# via yamllint
platformdirs==4.0.0
# via virtualenv
pluggy==1.0.0
pluggy==1.4.0
# via pytest
portend==3.1.0
# via
@ -324,7 +326,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc29
pytest-salt-factories==1.0.1
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -337,9 +339,9 @@ pytest-subtests==0.11.0
# via -r requirements/pytest.txt
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-timeout==2.1.0
pytest-timeout==2.3.1
# via -r requirements/pytest.txt
pytest==7.3.2
pytest==8.1.1
# via
# -r requirements/pytest.txt
# pytest-custom-exit-code
@ -440,6 +442,7 @@ six==1.15.0
# python-dateutil
# pyvmomi
# pywinrm
# textfsm
# websocket-client
smmap==5.0.1
# via
@ -453,6 +456,8 @@ tempora==5.3.0
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# portend
textfsm==1.1.3
# via -r requirements/static/ci/common.in
timelib==0.3.0
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt

View file

@ -135,6 +135,7 @@ def update():
cached_file_path = _get_cached_file_name(
bucket, saltenv, file_path
)
log.debug("%s - %s : %s", bucket, saltenv, file_path)
# load the file from S3 if it's not in the cache or it's old
@ -356,6 +357,7 @@ def _init():
# check mtime of the buckets files cache
metadata = None
try:
if os.path.getmtime(cache_file) > exp:
metadata = _read_buckets_cache_file(cache_file)
@ -366,6 +368,8 @@ def _init():
# bucket files cache expired or does not exist
metadata = _refresh_buckets_cache_file(cache_file)
_prune_deleted_files(metadata)
return metadata
@ -374,7 +378,6 @@ def _get_cache_dir():
Return the path to the s3cache dir
"""
# Or is that making too many assumptions?
return os.path.join(__opts__["cachedir"], "s3cache")
@ -383,26 +386,15 @@ def _get_cached_file_name(bucket_name, saltenv, path):
Return the cached file name for a bucket path file
"""
file_path = os.path.join(_get_cache_dir(), saltenv, bucket_name, path)
# make sure bucket and saltenv directories exist
if not os.path.exists(os.path.dirname(file_path)):
os.makedirs(os.path.dirname(file_path))
return file_path
return os.path.join(_get_cache_dir(), saltenv, bucket_name, path)
def _get_buckets_cache_filename():
"""
Return the filename of the cache for bucket contents.
Create the path if it does not exist.
"""
cache_dir = _get_cache_dir()
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
return os.path.join(cache_dir, "buckets_files.cache")
return os.path.join(_get_cache_dir(), "buckets_files.cache")
def _refresh_buckets_cache_file(cache_file):
@ -423,6 +415,7 @@ def _refresh_buckets_cache_file(cache_file):
path_style,
https_enable,
) = _get_s3_key()
metadata = {}
# helper s3 query function
@ -571,10 +564,72 @@ def _refresh_buckets_cache_file(cache_file):
return metadata
def _prune_deleted_files(metadata):
cache_dir = _get_cache_dir()
cached_files = set()
roots = set()
if _is_env_per_bucket():
for env, env_data in metadata.items():
for bucket_meta in env_data:
for bucket, bucket_data in bucket_meta.items():
root = os.path.join(cache_dir, env, bucket)
if os.path.exists(root):
roots.add(root)
for meta in bucket_data:
path = meta["Key"]
cached_files.add(path)
else:
for env, env_data in metadata.items():
for bucket in _get_buckets():
root = os.path.join(cache_dir, bucket)
if os.path.exists(root):
roots.add(root)
for meta in env_data:
cached_files.add(meta["Key"])
if log.isEnabledFor(logging.DEBUG):
import pprint
log.debug("cached file list:\n%s", pprint.pformat(cached_files))
for root in roots:
for base, dirs, files in os.walk(root):
for file_name in files:
path = os.path.join(base, file_name)
relpath = os.path.relpath(path, root)
if relpath not in cached_files:
log.debug("File '%s' not found in cached file list", path)
log.info(
"File '%s' was deleted from bucket, deleting local copy",
relpath,
)
os.unlink(path)
dirname = os.path.dirname(path)
# delete empty dirs all the way up to the cache dir
while dirname != cache_dir and len(os.listdir(dirname)) == 0:
log.debug("Directory '%s' is now empty, removing", dirname)
os.rmdir(dirname)
dirname = os.path.dirname(dirname)
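The new pruning pass relies on the metadata layout returned by _refresh_buckets_cache_file: in environment-per-bucket mode each saltenv maps to a list of single-bucket dicts whose values are lists of S3 item dicts carrying a Key entry. A minimal sketch of that shape and of the cached-file set the loop above derives from it (bucket, environment and key names are invented):

# Illustrative metadata, mirroring the nesting walked by _prune_deleted_files.
sample_metadata = {
    "base": [
        {"bucket-one": [{"Key": "top.sls"}, {"Key": "webserver/init.sls"}]},
    ],
}
cached_files = set()
for env_data in sample_metadata.values():
    for bucket_meta in env_data:
        for bucket_data in bucket_meta.values():
            cached_files.update(meta["Key"] for meta in bucket_data)
assert cached_files == {"top.sls", "webserver/init.sls"}
# Anything found under the cache roots whose path relative to the root is not
# in cached_files is deleted, exactly as in the os.walk() loop above.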
def _write_buckets_cache_file(metadata, cache_file):
"""
Write the contents of the buckets cache file
"""
cache_dir = _get_cache_dir()
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
if os.path.isfile(cache_file):
os.remove(cache_file)
@ -591,6 +646,10 @@ def _read_buckets_cache_file(cache_file):
log.debug("Reading buckets cache file")
if not os.path.exists(cache_file):
log.debug("Cache file does not exist")
return None
with salt.utils.files.fopen(cache_file, "rb") as fp_:
try:
data = pickle.load(fp_)
@ -698,6 +757,13 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
Checks the local cache for the file, if it's old or missing go grab the
file from S3 and update the cache
"""
# make sure bucket and saltenv directories exist
target_dir = os.path.dirname(cached_file_path)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
(
key,
keyid,

View file

@ -20,6 +20,7 @@ import re
import shutil
import tempfile
import time
from collections import OrderedDict
from urllib.error import HTTPError
from urllib.request import Request as _Request
from urllib.request import urlopen as _urlopen
@ -204,23 +205,24 @@ if not HAS_APT:
repo_line.append(self.type)
opts = _get_opts(self.line)
if self.architectures:
archs = ",".join(self.architectures)
opts["arch"]["full"] = f"arch={archs}"
if "arch" not in opts:
opts["arch"] = {}
opts["arch"]["full"] = f"arch={','.join(self.architectures)}"
opts["arch"]["value"] = self.architectures
if self.signedby:
if "signedby" not in opts:
opts["signedby"] = {}
opts["signedby"]["full"] = f"signed-by={self.signedby}"
opts["signedby"]["value"] = self.signedby
ordered_opts = [
opt_type for opt_type, opt in opts.items() if opt["full"] != ""
]
ordered_opts = []
for opt in opts.values():
if opt["full"] != "":
ordered_opts[opt["index"]] = opt["full"]
ordered_opts.append(opt["full"])
if ordered_opts:
repo_line.append("[{}]".format(" ".join(ordered_opts)))
repo_line.append(f"[{' '.join(ordered_opts)}]")
repo_line += [self.uri, self.dist, " ".join(self.comps)]
if self.comment:
@ -237,10 +239,12 @@ if not HAS_APT:
if repo_line[1].startswith("["):
repo_line = [x for x in (line.strip("[]") for line in repo_line) if x]
opts = _get_opts(self.line)
self.architectures.extend(opts["arch"]["value"])
self.signedby = opts["signedby"]["value"]
for opt in opts:
opt = opts[opt]["full"]
if "arch" in opts:
self.architectures.extend(opts["arch"]["value"])
if "signedby" in opts:
self.signedby = opts["signedby"]["value"]
for opt in opts.values():
opt = opt["full"]
if opt:
try:
repo_line.pop(repo_line.index(opt))
@ -1751,31 +1755,27 @@ def _get_opts(line):
Return all opts in [] for a repo line
"""
get_opts = re.search(r"\[(.*=.*)\]", line)
ret = {
"arch": {"full": "", "value": "", "index": 0},
"signedby": {"full": "", "value": "", "index": 0},
}
ret = OrderedDict()
if not get_opts:
return ret
opts = get_opts.group(0).strip("[]")
architectures = []
for idx, opt in enumerate(opts.split()):
for opt in opts.split():
if opt.startswith("arch"):
architectures.extend(opt.split("=", 1)[1].split(","))
ret["arch"] = {}
ret["arch"]["full"] = opt
ret["arch"]["value"] = architectures
ret["arch"]["index"] = idx
elif opt.startswith("signed-by"):
ret["signedby"] = {}
ret["signedby"]["full"] = opt
ret["signedby"]["value"] = opt.split("=", 1)[1]
ret["signedby"]["index"] = idx
else:
other_opt = opt.split("=", 1)[0]
ret[other_opt] = {}
ret[other_opt]["full"] = opt
ret[other_opt]["value"] = opt.split("=", 1)[1]
ret[other_opt]["index"] = idx
return ret
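With this rewrite _get_opts only creates entries for options that actually appear in the repo line, and the per-option "index" bookkeeping is gone, so callers now have to test for "arch" or "signedby" before indexing (as the hunks below do). A rough sketch of the structure it returns for a typical signed repo line (the URI and keyring path are invented):

line = (
    "deb [arch=amd64,arm64 signed-by=/usr/share/keyrings/example.gpg] "
    "https://repo.example.com/debian stable main"
)
opts = _get_opts(line)
# opts is an OrderedDict along the lines of:
#   "arch"     -> {"full": "arch=amd64,arm64", "value": ["amd64", "arm64"]}
#   "signedby" -> {"full": "signed-by=/usr/share/keyrings/example.gpg",
#                  "value": "/usr/share/keyrings/example.gpg"}
signedby = opts["signedby"]["value"] if "signedby" in opts else ""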
@ -1788,7 +1788,11 @@ def _split_repo_str(repo):
if not HAS_APT:
signedby = entry.signedby
else:
signedby = _get_opts(line=repo)["signedby"].get("value", "")
opts = _get_opts(line=repo)
if "signedby" in opts:
signedby = opts["signedby"].get("value", "")
else:
signedby = ""
if signedby:
# python3-apt does not support signedby. So if signedby
# is in the repo we have to check our code to see if the
@ -1956,7 +1960,12 @@ def list_repos(**kwargs):
if not HAS_APT:
signedby = source.signedby
else:
signedby = _get_opts(line=source.line)["signedby"].get("value", "")
opts = _get_opts(line=source.line)
if "signedby" in opts:
signedby = opts["signedby"].get("value", "")
else:
signedby = ""
repo = {}
repo["file"] = source.file
repo["comps"] = getattr(source, "comps", [])
@ -2976,7 +2985,11 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs):
if not HAS_APT:
signedby = mod_source.signedby
else:
signedby = _get_opts(repo)["signedby"].get("value", "")
opts = _get_opts(repo)
if "signedby" in opts:
signedby = opts["signedby"].get("value", "")
else:
signedby = ""
return {
repo: {
@ -3077,7 +3090,11 @@ def _expand_repo_def(os_name, os_codename=None, **kwargs):
signedby = source_entry.signedby
kwargs["signedby"] = signedby
else:
signedby = _get_opts(repo)["signedby"].get("value", "")
opts = _get_opts(repo)
if "signedby" in opts:
signedby = opts["signedby"].get("value", "")
else:
signedby = ""
_source_entry = source_list.add(
type=source_entry.type,

View file

@ -551,7 +551,7 @@ def install(
cmd.append("-S")
if refresh is True:
cmd.append("-y")
if sysupgrade is True or (sysupgrade is None and refresh is True):
if sysupgrade is True:
cmd.append("-u")
cmd.extend(["--noprogressbar", "--noconfirm", "--needed"])
wildcards = []

View file

@ -19,7 +19,7 @@ inside the renderer (Jinja, Mako, Genshi, etc.).
import logging
import os
from salt.utils.files import fopen
import salt.utils.files
try:
import textfsm
@ -188,11 +188,14 @@ def extract(template_path, raw_text=None, raw_text_file=None, saltenv="base"):
# Disabling pylint W8470 to not complain about fopen.
# Unfortunately textFSM needs the file handle rather than the content...
# pylint: disable=W8470
tpl_file_handle = fopen(tpl_cached_path, "r")
# pylint: disable=W8470
log.debug(tpl_file_handle.read())
tpl_file_handle.seek(0) # move the object position back at the top of the file
fsm_handler = textfsm.TextFSM(tpl_file_handle)
with salt.utils.files.fopen(tpl_cached_path, "r") as tpl_file_handle:
# pylint: disable=W8470
tpl_file_data = tpl_file_handle.read()
log.debug(tpl_file_data)
tpl_file_handle.seek(
0
) # move the object position back at the top of the file
fsm_handler = textfsm.TextFSM(tpl_file_handle)
except textfsm.TextFSMTemplateError as tfte:
log.error("Unable to parse the TextFSM template", exc_info=True)
ret["comment"] = (

View file

@ -329,7 +329,7 @@ class daclConstants:
return path
def _getUserSid(user):
def _get_user_sid(user):
"""
return a state error dictionary, with 'sid' as a field if it could be returned
if user is None, sid will also be None
@ -413,7 +413,7 @@ def get(path, objectType, user=None):
"""
ret = {"Path": path, "ACLs": []}
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if path and objectType:
dc = daclConstants()
@ -458,7 +458,7 @@ def add_ace(path, objectType, user, permission, acetype, propagation):
acetype = acetype.strip().upper()
propagation = propagation.strip().upper()
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if not sidRet["result"]:
return sidRet
permissionbit = dc.getPermissionBit(objectTypeBit, permission)
@ -555,7 +555,7 @@ def rm_ace(path, objectType, user, permission=None, acetype=None, propagation=No
if check_ace(path, objectType, user, permission, acetype, propagation, True)[
"Exists"
]:
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if not sidRet["result"]:
return sidRet
permissionbit = (
@ -804,7 +804,7 @@ def check_inheritance(path, objectType, user=None):
ret = {"result": False, "Inheritance": False, "comment": ""}
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
dc = daclConstants()
objectType = dc.getObjectTypeBit(objectType)
@ -880,7 +880,7 @@ def check_ace(
dc.getPropagationBit(objectTypeBit, propagation) if propagation else None
)
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if not sidRet["result"]:
return sidRet

View file

@ -47,6 +47,7 @@ import time
import urllib.parse
from functools import cmp_to_key
import salt.fileserver
import salt.payload
import salt.syspaths
import salt.utils.args
@ -907,7 +908,7 @@ def refresh_db(**kwargs):
The database is stored in a serialized format located by default at the
following location:
``C:\salt\var\cache\salt\minion\files\base\win\repo-ng\winrepo.p``
``C:\ProgramData\Salt Project\Salt\var\cache\salt\minion\files\base\win\repo-ng\winrepo.p``
This module performs the following steps to generate the software metadata
database:
@ -915,7 +916,7 @@ def refresh_db(**kwargs):
- Fetch the package definition files (.sls) from `winrepo_source_dir`
(default `salt://win/repo-ng`) and cache them in
`<cachedir>\files\<saltenv>\<winrepo_source_dir>`
(default: ``C:\salt\var\cache\salt\minion\files\base\win\repo-ng``)
(default: ``C:\ProgramData\Salt Project\Salt\var\cache\salt\minion\files\base\win\repo-ng``)
- Call :py:func:`pkg.genrepo <salt.modules.win_pkg.genrepo>` to parse the
package definition files and generate the repository metadata database
file (`winrepo.p`)
@ -976,7 +977,7 @@ def refresh_db(**kwargs):
.. warning::
When calling this command from a state using `module.run` be sure to
pass `failhard: False`. Otherwise the state will report failure if it
pass `failhard: False`. Otherwise, the state will report failure if it
encounters a bad software definition file.
CLI Example:
@ -1020,6 +1021,11 @@ def refresh_db(**kwargs):
"Failed to clear one or more winrepo cache files", info={"failed": failed}
)
# Clear the cache so that newly copied package definitions will be picked up
fileserver = salt.fileserver.Fileserver(__opts__)
load = {"saltenv": saltenv, "fsbackend": None}
fileserver.clear_file_list_cache(load=load)
# Cache repo-ng locally
log.info("Fetching *.sls files from %s", repo_details.winrepo_source_dir)
try:
@ -1170,10 +1176,11 @@ def genrepo(**kwargs):
if name.endswith(".sls"):
total_files_processed += 1
_repo_process_pkg_sls(
os.path.join(root, name),
os.path.join(short_path, name),
ret,
successful_verbose,
filename=os.path.join(root, name),
short_path_name=os.path.join(short_path, name),
ret=ret,
successful_verbose=successful_verbose,
saltenv=saltenv,
)
with salt.utils.files.fopen(repo_details.winrepo_file, "wb") as repo_cache:
@ -1212,7 +1219,9 @@ def genrepo(**kwargs):
return results
def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
def _repo_process_pkg_sls(
filename, short_path_name, ret, successful_verbose, saltenv="base"
):
renderers = salt.loader.render(__opts__, __salt__)
def _failed_compile(prefix_msg, error_msg):
@ -1227,6 +1236,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
__opts__["renderer"],
__opts__.get("renderer_blacklist", ""),
__opts__.get("renderer_whitelist", ""),
saltenv=saltenv,
)
except SaltRenderError as exc:
return _failed_compile("Failed to compile", exc)
@ -2359,7 +2369,23 @@ def _get_name_map(saltenv="base"):
def get_package_info(name, saltenv="base"):
"""
Return package info. Returns empty map if package not available.
Get information about the package as found in the winrepo database
Args:
name (str): The name of the package
saltenv (str): The salt environment to use. Default is "base"
Returns:
dict: A dictionary of package info, empty if package not available
CLI Example:
.. code-block:: bash
salt '*' pkg.get_package_info chrome
"""
return _get_package_info(name=name, saltenv=saltenv)

View file

@ -23,7 +23,7 @@ import salt.utils.json
import salt.utils.platform
import salt.utils.powershell
import salt.utils.versions
from salt.exceptions import SaltInvocationError
from salt.exceptions import CommandExecutionError, SaltInvocationError
_DEFAULT_CONTEXT = "LocalMachine"
_DEFAULT_FORMAT = "cer"
@ -73,15 +73,19 @@ def _cmd_run(cmd, as_json=False):
"".join(cmd_full), shell="powershell", python_shell=True
)
if cmd_ret["retcode"] != 0:
_LOG.error("Unable to execute command: %s\nError: %s", cmd, cmd_ret["stderr"])
if cmd_ret["stderr"]:
raise CommandExecutionError(
"Unable to execute command: {}\nError: {}".format(cmd, cmd_ret["stderr"])
)
if as_json:
try:
items = salt.utils.json.loads(cmd_ret["stdout"], strict=False)
return items
except ValueError:
_LOG.error("Unable to parse return data as Json.")
raise CommandExecutionError(
"Unable to parse return data as JSON:\n{}".format(cmd_ret["stdout"])
)
return cmd_ret["stdout"]
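With this change a failing PowerShell call now raises CommandExecutionError, either on any stderr output or when the JSON output cannot be parsed, instead of logging and returning whatever was on stdout. A standalone sketch of the same fail-loudly pattern, using subprocess rather than Salt's cmd.run_all and with names chosen purely for illustration:

import json
import subprocess

def run_powershell_json(cmd):
    # Raise on stderr output or unparsable JSON, mirroring _cmd_run above.
    proc = subprocess.run(
        ["powershell", "-Command", cmd], capture_output=True, text=True, check=False
    )
    if proc.stderr:
        raise RuntimeError(f"Unable to execute command: {cmd}\nError: {proc.stderr}")
    try:
        return json.loads(proc.stdout)
    except ValueError as exc:
        raise RuntimeError(f"Unable to parse return data as JSON:\n{proc.stdout}") from exc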

View file

@ -22,6 +22,7 @@ Module for managing Windows Users.
This currently only works with local user accounts, not domain accounts
"""
import ctypes
import logging
import shlex
import time
@ -30,6 +31,8 @@ from datetime import datetime
import salt.utils.args
import salt.utils.dateutils
import salt.utils.platform
import salt.utils.versions
import salt.utils.win_reg
import salt.utils.winapi
from salt.exceptions import CommandExecutionError
@ -82,7 +85,7 @@ def add(
Add a user to the minion.
Args:
name (str): User name
name (str): The username for the new account
password (str, optional): User's password in plain text.
@ -106,7 +109,7 @@ def add(
logs on.
Returns:
bool: True if successful. False is unsuccessful.
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -116,10 +119,13 @@ def add(
"""
user_info = {}
if name:
user_info["name"] = name
user_info["name"] = str(name)
else:
return False
user_info["password"] = password
if password:
user_info["password"] = str(password)
else:
user_info["password"] = None
user_info["priv"] = win32netcon.USER_PRIV_USER
user_info["home_dir"] = home
user_info["comment"] = description
@ -160,13 +166,13 @@ def update(
):
# pylint: disable=anomalous-backslash-in-string
"""
Updates settings for the windows user. Name is the only required parameter.
Updates settings for the Windows user. Name is the only required parameter.
Settings will only be changed if the parameter is passed a value.
.. versionadded:: 2015.8.0
Args:
name (str): The user name to update.
name (str): The username to update.
password (str, optional): New user password in plain text.
@ -206,7 +212,7 @@ def update(
changing the password. False allows the user to change the password.
Returns:
bool: True if successful. False is unsuccessful.
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -219,7 +225,7 @@ def update(
# Make sure the user exists
# Return an object containing current settings for the user
try:
user_info = win32net.NetUserGetInfo(None, name, 4)
user_info = win32net.NetUserGetInfo(None, str(name), 4)
except win32net.error as exc:
log.error("Failed to update user %s", name)
log.error("nbr: %s", exc.winerror)
@ -230,7 +236,9 @@ def update(
# Check parameters to update
# Update the user object with new settings
if password:
user_info["password"] = password
user_info["password"] = str(password)
else:
user_info["password"] = None
if home:
user_info["home_dir"] = home
if homedrive:
@ -251,7 +259,7 @@ def update(
dt_obj = salt.utils.dateutils.date_cast(expiration_date)
except (ValueError, RuntimeError):
return f"Invalid Date/Time Format: {expiration_date}"
user_info["acct_expires"] = time.mktime(dt_obj.timetuple())
user_info["acct_expires"] = int(dt_obj.timestamp())
if expired is not None:
if expired:
user_info["password_expired"] = 1
@ -263,6 +271,7 @@ def update(
else:
user_info["flags"] &= ~win32netcon.UF_ACCOUNTDISABLE
if unlock_account is not None:
# We can only unlock with this flag... we can't lock
if unlock_account:
user_info["flags"] &= ~win32netcon.UF_LOCKOUT
if password_never_expires is not None:
@ -278,7 +287,7 @@ def update(
# Apply new settings
try:
win32net.NetUserSetInfo(None, name, 4, user_info)
win32net.NetUserSetInfo(None, str(name), 4, user_info)
except win32net.error as exc:
log.error("Failed to update user %s", name)
log.error("nbr: %s", exc.winerror)
@ -305,7 +314,7 @@ def delete(name, purge=False, force=False):
user out and delete user.
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -315,7 +324,7 @@ def delete(name, purge=False, force=False):
"""
# Check if the user exists
try:
user_info = win32net.NetUserGetInfo(None, name, 4)
user_info = win32net.NetUserGetInfo(None, str(name), 4)
except win32net.error as exc:
log.error("User not found: %s", name)
log.error("nbr: %s", exc.winerror)
@ -367,7 +376,7 @@ def delete(name, purge=False, force=False):
# Remove the User Profile directory
if purge:
try:
sid = getUserSid(name)
sid = get_user_sid(name)
win32profile.DeleteProfile(sid)
except pywintypes.error as exc:
(number, context, message) = exc.args
@ -382,7 +391,7 @@ def delete(name, purge=False, force=False):
# And finally remove the user account
try:
win32net.NetUserDel(None, name)
win32net.NetUserDel(None, str(name))
except win32net.error as exc:
log.error("Failed to delete user %s", name)
log.error("nbr: %s", exc.winerror)
@ -394,11 +403,28 @@ def delete(name, purge=False, force=False):
def getUserSid(username):
"""
Deprecated function. Please use get_user_sid instead
CLI Example:
.. code-block:: bash
salt '*' user.get_user_sid jsnuffy
"""
salt.utils.versions.warn_until(
version=3009,
message="'getUserSid' is being deprecated. Please use get_user_sid instead",
)
return get_user_sid(username)
def get_user_sid(username):
"""
Get the Security ID for the user
Args:
username (str): The user name for which to look up the SID
username (str): The username for which to look up the SID
Returns:
str: The user SID
@ -407,7 +433,7 @@ def getUserSid(username):
.. code-block:: bash
salt '*' user.getUserSid jsnuffy
salt '*' user.get_user_sid jsnuffy
"""
domain = win32api.GetComputerName()
if username.find("\\") != -1:
@ -424,12 +450,12 @@ def setpassword(name, password):
Set the user's password
Args:
name (str): The user name for which to set the password
name (str): The username for which to set the password
password (str): The new password
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -445,12 +471,12 @@ def addgroup(name, group):
Add user to a group
Args:
name (str): The user name to add to the group
name (str): The username to add to the group
group (str): The name of the group to which to add the user
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -458,7 +484,7 @@ def addgroup(name, group):
salt '*' user.addgroup jsnuffy 'Power Users'
"""
name = shlex.quote(name)
name = shlex.quote(str(name))
group = shlex.quote(group).lstrip("'").rstrip("'")
user = info(name)
@ -478,12 +504,12 @@ def removegroup(name, group):
Remove user from a group
Args:
name (str): The user name to remove from the group
name (str): The username to remove from the group
group (str): The name of the group from which to remove the user
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -491,7 +517,7 @@ def removegroup(name, group):
salt '*' user.removegroup jsnuffy 'Power Users'
"""
name = shlex.quote(name)
name = shlex.quote(str(name))
group = shlex.quote(group).lstrip("'").rstrip("'")
user = info(name)
@ -519,7 +545,7 @@ def chhome(name, home, **kwargs):
home (str): The new location of the home directory
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -562,7 +588,7 @@ def chprofile(name, profile):
profile (str): The new location of the profile
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -578,12 +604,12 @@ def chfullname(name, fullname):
Change the full name of the user
Args:
name (str): The user name for which to change the full name
name (str): The username for which to change the full name
fullname (str): The new value for the full name
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -600,7 +626,7 @@ def chgroups(name, groups, append=True):
member of only the specified groups
Args:
name (str): The user name for which to change groups
name (str): The username for which to change groups
groups (str, list): A single group or a list of groups to assign to the
user. For multiple groups this can be a comma delimited string or a
@ -611,7 +637,7 @@ def chgroups(name, groups, append=True):
only. Default is True.
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -623,21 +649,31 @@ def chgroups(name, groups, append=True):
groups = groups.split(",")
groups = [x.strip(" *") for x in groups]
ugrps = set(list_groups(name))
if ugrps == set(groups):
return True
current_groups = set(list_groups(name))
expected_groups = set()
name = shlex.quote(name)
name = shlex.quote(str(name))
if not append:
for group in ugrps:
# We don't want to append to the list, remove groups not in the new set
# of groups
for group in current_groups:
group = shlex.quote(group).lstrip("'").rstrip("'")
if group not in groups:
cmd = f'net localgroup "{group}" {name} /delete'
__salt__["cmd.run_all"](cmd, python_shell=True)
else:
expected_groups.add(group)
else:
# We're appending to the current list of groups. If they already match
# then bail
if current_groups == set(groups):
return True
else:
expected_groups = current_groups.union(set(groups))
for group in groups:
if group in ugrps:
if group in current_groups:
continue
group = shlex.quote(group).lstrip("'").rstrip("'")
cmd = f'net localgroup "{group}" {name} /add'
@ -646,8 +682,9 @@ def chgroups(name, groups, append=True):
log.error(out["stdout"])
return False
agrps = set(list_groups(name))
return len(ugrps - agrps) == 0
new_groups = set(list_groups(name))
return len(expected_groups - new_groups) == 0
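The reworked logic keeps an expected_groups set, issues net localgroup add/delete commands only for the differences, then re-reads membership and requires that nothing expected is missing. That final check is plain set arithmetic; a small illustration for the append=True path (group names are invented):

# What the account already has and what the caller asked for.
current_groups = {"Users", "Backup Operators"}
requested_groups = {"Users", "Power Users"}

# append=True: expected membership is the union of current and requested.
expected_groups = current_groups | requested_groups

# Membership as re-read after the net localgroup calls (illustrative value).
new_groups = {"Users", "Backup Operators", "Power Users"}

# chgroups returns True only when no expected group is missing.
assert len(expected_groups - new_groups) == 0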
def info(name):
@ -677,6 +714,7 @@ def info(name):
- last_logon
- account_disabled
- account_locked
- expiration_date
- password_never_expires
- disallow_change_password
- gid
@ -690,14 +728,14 @@ def info(name):
ret = {}
items = {}
try:
items = win32net.NetUserGetInfo(None, name, 4)
items = win32net.NetUserGetInfo(None, str(name), 4)
except win32net.error:
pass
if items:
groups = []
try:
groups = win32net.NetUserGetLocalGroups(None, name)
groups = win32net.NetUserGetLocalGroups(None, str(name))
except win32net.error:
pass
@ -722,9 +760,15 @@ def info(name):
ret["last_logon"] = datetime.fromtimestamp(items["last_logon"]).strftime(
"%Y-%m-%d %H:%M:%S"
)
ret["expiration_date"] = datetime.fromtimestamp(items["acct_expires"]).strftime(
"%Y-%m-%d %H:%M:%S"
)
# If the value is -1 or 0xFFFFFFFF, it is set to never expire
if items["acct_expires"] == ctypes.c_ulong(win32netcon.TIMEQ_FOREVER).value:
ret["expiration_date"] = "Never"
else:
ret["expiration_date"] = datetime.fromtimestamp(
items["acct_expires"]
).strftime("%Y-%m-%d %H:%M:%S")
ret["expired"] = items["password_expired"] == 1
if not ret["profile"]:
ret["profile"] = _get_userprofile_from_registry(name, ret["uid"])
@ -765,17 +809,17 @@ def _get_userprofile_from_registry(user, sid):
registry
Args:
user (str): The user name, used in debug message
user (str): The username, used in debug message
sid (str): The sid to lookup in the registry
Returns:
str: Profile directory
"""
profile_dir = __utils__["reg.read_value"](
"HKEY_LOCAL_MACHINE",
f"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{sid}",
"ProfileImagePath",
profile_dir = salt.utils.win_reg.read_value(
hive="HKEY_LOCAL_MACHINE",
key=f"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{sid}",
vname="ProfileImagePath",
)["vdata"]
log.debug('user %s with sid=%s profile is located at "%s"', user, sid, profile_dir)
return profile_dir
@ -786,7 +830,7 @@ def list_groups(name):
Return a list of groups the named user belongs to
Args:
name (str): The user name for which to list groups
name (str): The username for which to list groups
Returns:
list: A list of groups to which the user belongs
@ -829,9 +873,9 @@ def getent(refresh=False):
return __context__["user.getent"]
ret = []
for user in __salt__["user.list_users"]():
for user in list_users():
stuff = {}
user_info = __salt__["user.info"](user)
user_info = info(user)
stuff["gid"] = ""
stuff["groups"] = user_info["groups"]
@ -885,12 +929,12 @@ def rename(name, new_name):
Change the username for a named user
Args:
name (str): The user name to change
name (str): The username to change
new_name (str): The new name for the current user
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:

View file

@ -4005,13 +4005,25 @@ def directory(
if not force:
return _error(
ret,
"File exists where the backup target {} should go".format(
backupname
),
f"File exists where the backup target {backupname} should go",
)
if __opts__["test"]:
ret["changes"][
"forced"
] = f"Existing file at backup path {backupname} would be removed"
else:
__salt__["file.remove"](backupname)
os.rename(name, backupname)
if __opts__["test"]:
ret["changes"]["backup"] = f"{name} would be renamed to {backupname}"
ret["changes"][name] = {"directory": "new"}
ret["comment"] = (
f"{name} would be backed up and replaced with a new directory"
)
ret["result"] = None
return ret
else:
os.rename(name, backupname)
elif force:
# Remove whatever is in the way
if os.path.isfile(name):

View file

@ -355,6 +355,19 @@ def query(
agent = f"{agent} http.query()"
header_dict["User-agent"] = agent
if (
proxy_host
and proxy_port
and method == "POST"
and "Content-Type" not in header_dict
):
log.debug(
"Content-Type not provided for POST request, assuming application/x-www-form-urlencoded"
)
header_dict["Content-Type"] = "application/x-www-form-urlencoded"
if "Content-Length" not in header_dict:
header_dict["Content-Length"] = f"{len(data)}"
if backend == "requests":
sess = requests.Session()
sess.auth = auth
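The block added to query() above only fills in Content-Type (and then Content-Length) for POST requests routed through a proxy when the caller has not set them; explicit headers are never overridden. The same defaulting rule pulled out into a standalone sketch, with an invented helper name:

def default_post_headers(header_dict, data, method, proxy_host, proxy_port):
    # Mirrors the defaulting above: proxied POSTs without an explicit
    # Content-Type get the form-encoded default plus a Content-Length.
    if proxy_host and proxy_port and method == "POST" and "Content-Type" not in header_dict:
        header_dict["Content-Type"] = "application/x-www-form-urlencoded"
        if "Content-Length" not in header_dict:
            header_dict["Content-Length"] = f"{len(data)}"
    return header_dict

headers = default_post_headers(
    {}, data="a=1&b=2", method="POST", proxy_host="proxy.example.com", proxy_port=3128
)
assert headers == {"Content-Type": "application/x-www-form-urlencoded", "Content-Length": "7"}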

View file

@ -184,11 +184,23 @@ def get_sam_name(username):
.. note:: Long computer names are truncated to 15 characters
"""
# Some special identity groups require special handling. They do not have
# the domain prepended to the name. They should be added here as they are
# discovered. Use the SID to be locale agnostic.
# Everyone: S-1-1-0
special_id_groups = ["S-1-1-0"]
try:
sid_obj = win32security.LookupAccountName(None, username)[0]
except pywintypes.error:
return "\\".join([platform.node()[:15].upper(), username])
sid = win32security.ConvertSidToStringSid(sid_obj)
username, domain, _ = win32security.LookupAccountSid(None, sid_obj)
if sid in special_id_groups:
return username
return "\\".join([domain, username])

View file

@ -1051,7 +1051,9 @@ def load_file_or_bytes(fob):
with salt.utils.files.fopen(fob, "rb") as f:
fob = f.read()
if isinstance(fob, str):
if PEM_BEGIN.decode() in fob:
if fob.startswith("b64:"):
fob = base64.b64decode(fob[4:])
elif PEM_BEGIN.decode() in fob:
fob = fob.encode()
else:
try:

View file

@ -5,18 +5,16 @@
import logging
import os
import pathlib
import pprint
import re
import shutil
import stat
import sys
from functools import lru_cache, partial, wraps
from functools import lru_cache
from unittest import TestCase # pylint: disable=blacklisted-module
import _pytest.logging
import _pytest.skipping
import more_itertools
import psutil
import pytest
import salt
@ -448,7 +446,6 @@ def pytest_collection_modifyitems(config, items):
groups_collection_modifyitems(config, items)
from_filenames_collection_modifyitems(config, items)
log.warning("Modifying collected tests to keep track of fixture usage")
timeout_marker_tests_paths = (
str(PYTESTS_DIR / "pkg"),
str(PYTESTS_DIR / "scenarios"),
@ -477,103 +474,6 @@ def pytest_collection_modifyitems(config, items):
# Default to counting only the test execution for the timeouts, ie,
# without including the fixtures setup time towards the timeout.
item.add_marker(pytest.mark.timeout(90, func_only=True))
for fixture in item.fixturenames:
if fixture not in item._fixtureinfo.name2fixturedefs:
continue
for fixturedef in item._fixtureinfo.name2fixturedefs[fixture]:
if fixturedef.scope != "package":
continue
try:
fixturedef.finish.__wrapped__
except AttributeError:
original_func = fixturedef.finish
def wrapper(func, fixturedef):
@wraps(func)
def wrapped(self, request, nextitem=False):
try:
return self._finished
except AttributeError:
if nextitem:
fpath = pathlib.Path(self.baseid).resolve()
tpath = pathlib.Path(
nextitem.fspath.strpath
).resolve()
try:
tpath.relative_to(fpath)
# The test module is within the same package that the fixture is
if (
not request.session.shouldfail
and not request.session.shouldstop
):
log.debug(
"The next test item is still under the"
" fixture package path. Not"
" terminating %s",
self,
)
return
except ValueError:
pass
log.debug("Finish called on %s", self)
try:
return func(request)
except (
BaseException # pylint: disable=broad-except
) as exc:
pytest.fail(
"Failed to run finish() on {}: {}".format(
fixturedef, exc
),
pytrace=True,
)
finally:
self._finished = True
return partial(wrapped, fixturedef)
fixturedef.finish = wrapper(fixturedef.finish, fixturedef)
try:
fixturedef.finish.__wrapped__
except AttributeError:
fixturedef.finish.__wrapped__ = original_func
@pytest.hookimpl(trylast=True, hookwrapper=True)
def pytest_runtest_protocol(item, nextitem):
"""
implements the runtest_setup/call/teardown protocol for
the given test item, including capturing exceptions and calling
reporting hooks.
:arg item: test item for which the runtest protocol is performed.
:arg nextitem: the scheduled-to-be-next test item (or None if this
is the end my friend). This argument is passed on to
:py:func:`pytest_runtest_teardown`.
:return boolean: True if no further hook implementations should be invoked.
Stops at first non-None result, see :ref:`firstresult`
"""
request = item._request
used_fixture_defs = []
for fixture in item.fixturenames:
if fixture not in item._fixtureinfo.name2fixturedefs:
continue
for fixturedef in reversed(item._fixtureinfo.name2fixturedefs[fixture]):
if fixturedef.scope != "package":
continue
used_fixture_defs.append(fixturedef)
try:
# Run the test
yield
finally:
for fixturedef in used_fixture_defs:
fixturedef.finish(request, nextitem=nextitem)
del request
del used_fixture_defs
def pytest_markeval_namespace(config):
@ -1392,7 +1292,6 @@ def salt_call_cli(salt_minion_factory):
@pytest.fixture(scope="session", autouse=True)
def bridge_pytest_and_runtests(
reap_stray_processes,
salt_factories,
salt_syndic_master_factory,
salt_syndic_factory,
@ -1429,6 +1328,8 @@ def bridge_pytest_and_runtests(
salt_syndic_factory.config["conf_file"]
)
RUNTIME_VARS.TMP_SSH_CONF_DIR = str(sshd_config_dir)
with reap_stray_processes():
yield
@pytest.fixture(scope="session")
@ -1496,7 +1397,21 @@ def sshd_server(salt_factories, sshd_config_dir, salt_master, grains):
@pytest.fixture(scope="module")
def salt_ssh_roster_file(sshd_server, salt_master):
def known_hosts_file(sshd_server, salt_master, salt_factories):
with pytest.helpers.temp_file(
"ssh-known-hosts",
"\n".join(sshd_server.get_host_keys()),
salt_factories.tmp_root_dir,
) as known_hosts_file, pytest.helpers.temp_file(
"master.d/ssh-known-hosts.conf",
f"known_hosts_file: {known_hosts_file}",
salt_master.config_dir,
):
yield known_hosts_file
@pytest.fixture(scope="module")
def salt_ssh_roster_file(sshd_server, salt_master, known_hosts_file):
roster_contents = """
localhost:
host: 127.0.0.1
@ -1509,6 +1424,7 @@ def salt_ssh_roster_file(sshd_server, salt_master):
)
if salt.utils.platform.is_darwin():
roster_contents += " set_path: $PATH:/usr/local/bin/\n"
with pytest.helpers.temp_file(
"roster", roster_contents, salt_master.config_dir
) as roster_file:
@ -1728,46 +1644,6 @@ def from_filenames_collection_modifyitems(config, items):
# ----- Custom Fixtures --------------------------------------------------------------------------------------------->
@pytest.fixture(scope="session")
def reap_stray_processes():
# Run tests
yield
children = psutil.Process(os.getpid()).children(recursive=True)
if not children:
log.info("No stray processes found")
return
def on_terminate(proc):
log.debug("Process %s terminated with exit code %s", proc, proc.returncode)
if children:
# Reverse the order, siblings first, parents after
children.reverse()
log.warning(
"Test suite left %d astray processes running. Killing those processes:\n%s",
len(children),
pprint.pformat(children),
)
_, alive = psutil.wait_procs(children, timeout=3, callback=on_terminate)
for child in alive:
try:
child.kill()
except psutil.NoSuchProcess:
continue
_, alive = psutil.wait_procs(alive, timeout=3, callback=on_terminate)
if alive:
# Give up
for child in alive:
log.warning(
"Process %s survived SIGKILL, giving up:\n%s",
child,
pprint.pformat(child.as_dict()),
)
@pytest.fixture(scope="session")
def sminion():
return create_sminion()

View file

@ -17,6 +17,7 @@ salt/_logging/(impl|handlers).py:
salt/modules/(apkpkg|aptpkg|ebuildpkg|dpkg_lowpkg|freebsdpkg|mac_brew_pkg|mac_ports_pkg|openbsdpkg|opkg|pacmanpkg|pkgin|pkgng|pkg_resource|rpm_lowpkg|solarisipspkg|solarispkg|win_pkg|xbpspkg|yumpkg|zypperpkg)\.py:
- pytests.unit.states.test_pkg
- pytests.functional.modules.test_pkg
- pytests.functional.modules.test_win_pkg
- pytests.functional.states.test_pkg
- pytests.functional.states.pkgrepo.test_centos
- pytests.functional.states.pkgrepo.test_debian

View file

@ -61,7 +61,7 @@ class SSHCustomModuleTest(SSHCase):
self.assertEqual(expected, cmd)
@pytest.mark.slow_test
@pytest.mark.timeout(120)
@pytest.mark.timeout(120, func_only=True)
def test_ssh_custom_module(self):
"""
Test custom module work using SSHCase environment

View file

@ -27,7 +27,7 @@ class LocaleModuleTest(ModuleCase):
locale = self.run_function("locale.get_locale")
self.assertNotIn("Unsupported platform!", locale)
@pytest.mark.timeout(120)
@pytest.mark.timeout_unless_on_windows(120)
@pytest.mark.destructive_test
@pytest.mark.slow_test
def test_gen_locale(self):

View file

@ -113,9 +113,9 @@ def _rand_key_name(length):
def _check_skip(grains):
if grains["os"] == "CentOS Stream" and grains["osmajorrelease"] == 9:
return True
if grains["os"] == "AlmaLinux" and grains["osmajorrelease"] == 9:
if (grains["os"] in ("CentOS Stream", "AlmaLinux", "Rocky")) and grains[
"osmajorrelease"
] == 9:
return True
return False

View file

@ -55,7 +55,7 @@ def salt_eauth_account_factory():
@pytest.fixture(scope="session")
def salt_auto_account_factory():
return TestAccount(username="saltdev_auto", password="saltdev")
return TestAccount(username="saltdev-auto")
@pytest.fixture(scope="session")

View file

@ -23,6 +23,10 @@ pytestmark = [
reason="These tests are currently broken on spawning platforms. Need to be rewritten.",
),
pytest.mark.slow_test,
pytest.mark.skipif(
"grains['osfinger'] == 'Rocky Linux-8' and grains['osarch'] == 'aarch64'",
reason="Temporarily skip on Rocky Linux 8 Arm64",
),
]

View file

@ -318,41 +318,35 @@ def test_listen_requisite_resolution_names(state, state_tree):
assert "test_|-listener_service_|-crond_|-mod_watch" in ret
def test_onlyif_req(state, subtests):
onlyif = [{}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.succeed_with_changes", onlyif=onlyif
)
assert ret.result is True
assert ret.comment == "Success!"
onlyif = [{"fun": "test.true"}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.succeed_without_changes", onlyif=onlyif
)
assert ret.result is True
assert not ret.changes
assert ret.comment == "Success!"
onlyif = [{"fun": "test.false"}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.fail_with_changes", onlyif=onlyif
)
assert ret.result is True
assert not ret.changes
assert ret.comment == "onlyif condition is false"
onlyif = [{"fun": "test.true"}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.fail_with_changes", onlyif=onlyif
)
assert ret.result is False
@pytest.mark.parametrize(
"fun,onlyif,result,comment,assert_changes",
(
("test.succeed_with_changes", [{}], True, "Success!", None),
(
"test.succeed_without_changes",
[{"fun": "test.true"}],
True,
"Success!",
False,
),
(
"test.fail_with_changes",
[{"fun": "test.false"}],
True,
"onlyif condition is false",
False,
),
("test.fail_with_changes", [{"fun": "test.true"}], False, "Failure!", True),
),
)
def test_onlyif_req(state, fun, onlyif, result, comment, assert_changes):
ret = state.single(name="onlyif test", fun=fun, onlyif=onlyif)
assert ret.result is result
assert ret.comment == comment
if assert_changes is True:
assert ret.changes
assert ret.comment == "Failure!"
elif assert_changes is False:
assert not ret.changes
def test_listen_requisite_not_exist(state, state_tree):

View file

@ -3,10 +3,10 @@ import pytest
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.core_test,
pytest.mark.timeout_unless_on_windows(240),
]
@pytest.mark.timeout(120)
def test_unless_req(state):
ret = state.single(fun="test.succeed_with_changes", name="unless test", unless=[{}])
assert ret.result is True
@ -36,7 +36,6 @@ def test_unless_req(state):
assert ret.comment == "Success!"
@pytest.mark.timeout(120)
def test_unless_req_retcode(state):
ret = state.single(
fun="test.succeed_with_changes",

View file

@ -22,7 +22,7 @@ def assistive(modules):
def osa_script(assistive):
osa_script_path = "/usr/bin/osascript"
try:
ret = assistive.install(osa_script_path, True)
assistive.install(osa_script_path, True)
yield osa_script_path
except CommandExecutionError as exc:
pytest.skip(f"Unable to install {osa_script}: {exc}")
@ -33,7 +33,7 @@ def osa_script(assistive):
@pytest.fixture
def install_remove_pkg_name(assistive):
def install_remove_pkg_name(assistive, grains):
smile_bundle = "com.smileonmymac.textexpander"
try:
yield smile_bundle
@ -44,12 +44,19 @@ def install_remove_pkg_name(assistive):
@pytest.mark.slow_test
def test_install_and_remove(assistive, install_remove_pkg_name):
def test_install_and_remove(assistive, install_remove_pkg_name, grains):
"""
Tests installing and removing a bundled ID or command to use assistive access.
"""
ret = assistive.install(install_remove_pkg_name)
assert ret
try:
ret = assistive.install(install_remove_pkg_name)
assert ret
except CommandExecutionError as exc:
if grains["osmajorrelease"] != 12:
raise exc from None
if "attempt to write a readonly database" not in str(exc):
raise exc from None
pytest.skip("Test fails on MacOS 12(attempt to write a readonly database)")
ret = assistive.remove(install_remove_pkg_name)
assert ret

View file

@ -7,7 +7,7 @@ import pytest
pytestmark = [
pytest.mark.slow_test,
pytest.mark.timeout(120),
pytest.mark.timeout(120, func_only=True),
pytest.mark.destructive_test,
pytest.mark.skip_if_not_root,
pytest.mark.skip_unless_on_darwin,

View file

@ -9,7 +9,7 @@ import pytest
from salt.exceptions import SaltInvocationError
pytestmark = [
pytest.mark.timeout(120),
pytest.mark.timeout(120, func_only=True),
pytest.mark.slow_test,
pytest.mark.destructive_test,
pytest.mark.skip_if_not_root,

View file

@ -8,7 +8,7 @@ from salt.exceptions import SaltInvocationError
pytestmark = [
pytest.mark.slow_test,
pytest.mark.timeout(240),
pytest.mark.timeout(240, func_only=True),
pytest.mark.skip_if_binaries_missing("softwareupdate"),
pytest.mark.skip_if_not_root,
pytest.mark.skip_unless_on_darwin,

View file

@ -156,6 +156,9 @@ def test_get_offset(timezone):
"""
Test timezone.get_offset
"""
pytz = pytest.importorskip("pytz")
now = datetime.datetime.now(tz=pytz.UTC)
ret = timezone.set_zone("Pacific/Wake")
assert ret
ret = timezone.get_offset()
@ -166,7 +169,11 @@ def test_get_offset(timezone):
assert ret
ret = timezone.get_offset()
assert isinstance(ret, str)
assert ret == "-0800"
if now.astimezone(pytz.timezone("America/Los_Angeles")).dst():
assert ret == "-0700"
else:
assert ret == "-0800"
@pytest.mark.usefixtures("_reset_zone")

View file

@ -0,0 +1,35 @@
import pytest
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
pytest.mark.slow_test,
]
@pytest.fixture(scope="module")
def pkg_def_contents(state_tree):
return r"""
my-software:
'1.0.1':
full_name: 'My Software'
installer: 'C:\files\mysoftware.msi'
install_flags: '/qn /norestart'
uninstaller: 'C:\files\mysoftware.msi'
uninstall_flags: '/qn /norestart'
msiexec: True
reboot: False
"""
@pytest.fixture(scope="module")
def pkg(modules):
yield modules.pkg
def test_refresh_db(pkg, pkg_def_contents, state_tree, minion_opts):
assert len(pkg.get_package_info("my-software")) == 0
repo_dir = state_tree / "win" / "repo-ng"
with pytest.helpers.temp_file("my-software.sls", pkg_def_contents, repo_dir):
pkg.refresh_db()
assert len(pkg.get_package_info("my-software")) == 1

View file

@ -0,0 +1,341 @@
import pytest
from saltfactories.utils import random_string
from salt.exceptions import CommandExecutionError
pytestmark = [
pytest.mark.destructive_test,
pytest.mark.skip_unless_on_windows,
pytest.mark.windows_whitelisted,
]
@pytest.fixture(scope="module")
def user(modules):
return modules.user
@pytest.fixture
def username_str(user):
_username = random_string("test-account-", uppercase=False)
try:
yield _username
finally:
try:
user.delete(_username, purge=True, force=True)
except Exception: # pylint: disable=broad-except
# The point here is just system cleanup. It can fail if no account was created
pass
@pytest.fixture
def username_int(user):
_username = random_string("", uppercase=False, lowercase=False, digits=True)
try:
yield _username
finally:
try:
user.delete(_username, purge=True, force=True)
except Exception: # pylint: disable=broad-except
# The point here is just system cleanup. It can fail if no account was created
pass
@pytest.fixture
def account_str(user, username_str):
with pytest.helpers.create_account(username=username_str) as account:
user.addgroup(account.username, "Users")
yield account
@pytest.fixture
def account_int(user, username_int):
with pytest.helpers.create_account(username=username_int) as account:
user.addgroup(account.username, "Users")
yield account
def test_add_str(user, username_str):
ret = user.add(name=username_str)
assert ret is True
assert username_str in user.list_users()
def test_add_int(user, username_int):
ret = user.add(name=username_int)
assert ret is True
assert username_int in user.list_users()
def test_addgroup_str(user, account_str):
ret = user.addgroup(account_str.username, "Backup Operators")
assert ret is True
ret = user.info(account_str.username)
assert "Backup Operators" in ret["groups"]
def test_addgroup_int(user, account_int):
ret = user.addgroup(account_int.username, "Backup Operators")
assert ret is True
ret = user.info(account_int.username)
assert "Backup Operators" in ret["groups"]
def test_chfullname_str(user, account_str):
ret = user.chfullname(account_str.username, "New Full Name")
assert ret is True
ret = user.info(account_str.username)
assert ret["fullname"] == "New Full Name"
def test_chfullname_int(user, account_int):
ret = user.chfullname(account_int.username, "New Full Name")
assert ret is True
ret = user.info(account_int.username)
assert ret["fullname"] == "New Full Name"
def test_chgroups_single_str(user, account_str):
groups = ["Backup Operators"]
ret = user.chgroups(account_str.username, groups=groups)
assert ret is True
ret = user.info(account_str.username)
groups.append("Users")
assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_single_int(user, account_int):
groups = ["Backup Operators"]
ret = user.chgroups(account_int.username, groups=groups)
assert ret is True
ret = user.info(account_int.username)
groups.append("Users")
assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_list_str(user, account_str):
groups = ["Backup Operators", "Guests"]
ret = user.chgroups(account_str.username, groups=groups)
assert ret is True
ret = user.info(account_str.username)
groups.append("Users")
assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_list_int(user, account_int):
groups = ["Backup Operators", "Guests"]
ret = user.chgroups(account_int.username, groups=groups)
assert ret is True
ret = user.info(account_int.username)
groups.append("Users")
assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_list_append_false_str(user, account_str):
groups = ["Backup Operators", "Guests"]
ret = user.chgroups(account_str.username, groups=groups, append=False)
assert ret is True
ret = user.info(account_str.username)
assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_list_append_false_int(user, account_int):
groups = ["Backup Operators", "Guests"]
ret = user.chgroups(account_int.username, groups=groups, append=False)
assert ret is True
ret = user.info(account_int.username)
assert sorted(ret["groups"]) == sorted(groups)
def test_chhome_str(user, account_str):
home = r"C:\spongebob\squarepants"
ret = user.chhome(name=account_str.username, home=home)
assert ret is True
ret = user.info(name=account_str.username)
assert ret["home"] == home
def test_chhome_int(user, account_int):
home = r"C:\spongebob\squarepants"
ret = user.chhome(name=account_int.username, home=home)
assert ret is True
ret = user.info(name=account_int.username)
assert ret["home"] == home
def test_chprofile_str(user, account_str):
profile = r"C:\spongebob\squarepants"
ret = user.chprofile(name=account_str.username, profile=profile)
assert ret is True
ret = user.info(name=account_str.username)
assert ret["profile"] == profile
def test_chprofile_int(user, account_int):
profile = r"C:\spongebob\squarepants"
ret = user.chprofile(name=account_int.username, profile=profile)
assert ret is True
ret = user.info(name=account_int.username)
assert ret["profile"] == profile
def test_delete_str(user, account_str):
ret = user.delete(name=account_str.username)
assert ret is True
assert user.info(name=account_str.username) == {}
def test_delete_int(user, account_int):
ret = user.delete(name=account_int.username)
assert ret is True
assert user.info(name=account_int.username) == {}
def test_get_user_sid_str(user, account_str):
ret = user.get_user_sid(account_str.username)
assert ret.startswith("S-1-5")
def test_get_user_sid_int(user, account_int):
ret = user.get_user_sid(account_int.username)
assert ret.startswith("S-1-5")
def test_info_str(user, account_str):
ret = user.info(account_str.username)
assert ret["name"] == account_str.username
assert ret["uid"].startswith("S-1-5")
def test_info_int(user, account_int):
ret = user.info(account_int.username)
assert ret["name"] == account_int.username
assert ret["uid"].startswith("S-1-5")
def test_list_groups_str(user, account_str):
ret = user.list_groups(account_str.username)
assert ret == ["Users"]
def test_list_groups_int(user, account_int):
ret = user.list_groups(account_int.username)
assert ret == ["Users"]
def test_list_users(user):
ret = user.list_users()
assert "Administrator" in ret
def test_removegroup_str(user, account_str):
ret = user.removegroup(account_str.username, "Users")
assert ret is True
ret = user.info(account_str.username)
assert ret["groups"] == []
def test_removegroup_int(user, account_int):
ret = user.removegroup(account_int.username, "Users")
assert ret is True
ret = user.info(account_int.username)
assert ret["groups"] == []
def test_rename_str(user, account_str):
new_name = random_string("test-account-", uppercase=False)
ret = user.rename(name=account_str.username, new_name=new_name)
assert ret is True
assert new_name in user.list_users()
# Let's set it back so that it gets cleaned up...
ret = user.rename(name=new_name, new_name=account_str.username)
assert ret is True
def test_rename_str_missing(user, account_str):
missing = random_string("test-account-", uppercase=False)
with pytest.raises(CommandExecutionError):
user.rename(name=missing, new_name="spongebob")
def test_rename_str_existing(user, account_str):
new_existing = random_string("test-account-", uppercase=False)
ret = user.add(name=new_existing)
assert ret is True
with pytest.raises(CommandExecutionError):
user.rename(name=account_str.username, new_name=new_existing)
# We need to clean this up because it wasn't created in a fixture
ret = user.delete(name=new_existing, purge=True, force=True)
assert ret is True
assert new_existing not in user.list_users()
def test_rename_int(user, account_int):
new_name = random_string("", uppercase=False, lowercase=False, digits=True)
ret = user.rename(name=account_int.username, new_name=new_name)
assert ret is True
assert new_name in user.list_users()
# Let's set it back so that it gets cleaned up...
ret = user.rename(name=new_name, new_name=account_int.username)
assert ret is True
def test_rename_int_missing(user, account_int):
missing = random_string("", uppercase=False, lowercase=False, digits=True)
with pytest.raises(CommandExecutionError):
user.rename(name=missing, new_name="spongebob")
def test_rename_int_existing(user, account_int):
new_existing = random_string("", uppercase=False, lowercase=False, digits=True)
ret = user.add(name=new_existing)
assert ret is True
with pytest.raises(CommandExecutionError):
user.rename(name=account_int.username, new_name=new_existing)
# We need to clean this up because it wasn't created in a fixture
ret = user.delete(name=new_existing, purge=True, force=True)
assert ret is True
assert new_existing not in user.list_users()
def test_setpassword_str(user, account_str):
ret = user.setpassword(account_str.username, password="Sup3rS3cret")
# We have no way of verifying the password was changed on Windows, so the
# best we can do is check that the command completed successfully
assert ret is True
def test_setpassword_int(user, account_int):
ret = user.setpassword(account_int.username, password="Sup3rS3cret")
# We have no way of verifying the password was changed on Windows, so the
# best we can do is check that the command completed successfully
assert ret is True
@pytest.mark.parametrize(
"value_name, new_value, info_field, expected",
[
("description", "New description", "", None),
("homedrive", "H:", "", None),
("logonscript", "\\\\server\\script.cmd", "", None),
("expiration_date", "3/19/2024", "", "2024-03-19 00:00:00"),
("expiration_date", "Never", "", None),
("expired", True, "", None),
("expired", False, "", None),
("account_disabled", True, "", None),
("account_disabled", False, "", None),
("unlock_account", True, "account_locked", False),
("password_never_expires", True, "", None),
("password_never_expires", False, "", None),
("disallow_change_password", True, "", None),
("disallow_change_password", False, "", None),
],
)
def test_update_str(user, value_name, new_value, info_field, expected, account_str):
setting = {value_name: new_value}
ret = user.update(account_str.username, **setting)
assert ret is True
ret = user.info(account_str.username)
info_field = info_field if info_field else value_name
expected = expected if expected is not None else new_value
assert ret[info_field] == expected

View file

@ -436,3 +436,55 @@ def test_issue_12209_follow_symlinks(
assert one_group_check == state_file_account.group.name
two_group_check = modules.file.get_group(str(twodir), follow_symlinks=False)
assert two_group_check == state_file_account.group.name
@pytest.mark.parametrize("backupname_isfile", [False, True])
def test_directory_backupname_force_test_mode_noclobber(
file, tmp_path, backupname_isfile
):
"""
Ensure that file.directory does not make changes when backupname is used
alongside force=True and test=True.
See https://github.com/saltstack/salt/issues/66049
"""
source_dir = tmp_path / "source_directory"
source_dir.mkdir()
dest_dir = tmp_path / "dest_directory"
backupname = tmp_path / "backup_dir"
dest_dir.symlink_to(source_dir.resolve())
if backupname_isfile:
backupname.touch()
assert backupname.is_file()
ret = file.directory(
name=str(dest_dir),
allow_symlink=False,
force=True,
backupname=str(backupname),
test=True,
)
# Confirm None result
assert ret.result is None
try:
# Confirm dest_dir not modified
assert salt.utils.path.readlink(str(dest_dir)) == str(source_dir)
except OSError:
pytest.fail(f"{dest_dir} was modified")
# Confirm that comment and changes match what we expect
assert (
ret.comment
== f"{dest_dir} would be backed up and replaced with a new directory"
)
assert ret.changes[str(dest_dir)] == {"directory": "new"}
assert ret.changes["backup"] == f"{dest_dir} would be renamed to {backupname}"
if backupname_isfile:
assert ret.changes["forced"] == (
f"Existing file at backup path {backupname} would be removed"
)
else:
assert "forced" not in ret.changes

View file

@ -243,7 +243,7 @@ def copr_pkgrepo_with_comments_name(pkgrepo, grains):
):
pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"]))
if (
grains["os"] in ("CentOS Stream", "AlmaLinux")
grains["os"] in ("CentOS Stream", "AlmaLinux", "Rocky")
and grains["osmajorrelease"] == 9
or grains["osfinger"] == "Amazon Linux-2023"
):

Some files were not shown because too many files have changed in this diff.