Merge 3006.x into 3007.x

This commit is contained in:
Pedro Algarvio 2024-03-28 13:50:44 +00:00
commit 7f98810ea6
No known key found for this signature in database
GPG key ID: BB36BF6584A298FF
52 changed files with 1598 additions and 862 deletions

View file

@@ -163,7 +163,7 @@ jobs:
macos-dependencies:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
timeout-minutes: 90
strategy:
fail-fast: false
@@ -171,7 +171,7 @@ jobs:
include:
- distro-slug: macos-12
arch: x86_64
- distro-slug: macos-13-xlarge
- distro-slug: macos-13-arm64
arch: arm64
steps:

View file

@@ -65,7 +65,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-deps-linux-${{ matrix.arch }}
cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }}
- name: Setup Relenv
id: setup-relenv
@@ -116,7 +116,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-deps-macos
cache-prefix: ${{ inputs.cache-seed }}|build-deps|macos
- name: Setup Relenv
id: setup-relenv
@@ -166,7 +166,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-deps-windows-${{ matrix.arch }}
cache-prefix: ${{ inputs.cache-seed }}|build-deps|windows|${{ matrix.arch }}
- name: Setup Relenv
id: setup-relenv

View file

@@ -65,7 +65,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-windows
cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|linux
- name: Setup Salt Version
id: setup-salt-version
@@ -130,7 +130,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos
cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|macos
- name: Setup Salt Version
id: setup-salt-version
@@ -185,7 +185,7 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos
cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|windows
- name: Setup Salt Version
id: setup-salt-version

View file

@@ -43,7 +43,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@@ -60,7 +61,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@@ -198,19 +199,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@@ -230,10 +223,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@@ -511,7 +500,7 @@ jobs:
almalinux-8-pkg-tests:
name: Alma Linux 8 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -532,7 +521,7 @@ jobs:
almalinux-8-arm64-pkg-tests:
name: Alma Linux 8 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -574,7 +563,7 @@ jobs:
almalinux-9-arm64-pkg-tests:
name: Alma Linux 9 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-9-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -595,7 +584,7 @@ jobs:
amazonlinux-2-pkg-tests:
name: Amazon Linux 2 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -616,7 +605,7 @@ jobs:
amazonlinux-2-arm64-pkg-tests:
name: Amazon Linux 2 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -637,7 +626,7 @@ jobs:
amazonlinux-2023-pkg-tests:
name: Amazon Linux 2023 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -679,7 +668,7 @@ jobs:
centos-7-pkg-tests:
name: CentOS 7 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -700,7 +689,7 @@ jobs:
debian-10-pkg-tests:
name: Debian 10 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-10') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -721,7 +710,7 @@ jobs:
debian-11-pkg-tests:
name: Debian 11 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -742,7 +731,7 @@ jobs:
debian-11-arm64-pkg-tests:
name: Debian 11 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -763,7 +752,7 @@ jobs:
debian-12-pkg-tests:
name: Debian 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -784,7 +773,7 @@ jobs:
debian-12-arm64-pkg-tests:
name: Debian 12 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -805,7 +794,7 @@ jobs:
photonos-4-pkg-tests:
name: Photon OS 4 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -827,7 +816,7 @@ jobs:
photonos-4-arm64-pkg-tests:
name: Photon OS 4 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -849,7 +838,7 @@ jobs:
photonos-5-pkg-tests:
name: Photon OS 5 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -871,7 +860,7 @@ jobs:
photonos-5-arm64-pkg-tests:
name: Photon OS 5 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -893,7 +882,7 @@ jobs:
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -914,7 +903,7 @@ jobs:
ubuntu-2004-arm64-pkg-tests:
name: Ubuntu 20.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -935,7 +924,7 @@ jobs:
ubuntu-2204-pkg-tests:
name: Ubuntu 22.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -977,7 +966,7 @@ jobs:
macos-12-pkg-tests:
name: macOS 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-12') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -985,6 +974,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@@ -998,7 +988,7 @@ jobs:
macos-13-pkg-tests:
name: macOS 13 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -1006,6 +996,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@@ -1017,7 +1008,7 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@@ -1026,7 +1017,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@@ -1040,7 +1032,7 @@ jobs:
windows-2016-nsis-pkg-tests:
name: Windows 2016 NSIS Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -1061,7 +1053,7 @@ jobs:
windows-2016-msi-pkg-tests:
name: Windows 2016 MSI Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -1082,7 +1074,7 @@ jobs:
windows-2019-nsis-pkg-tests:
name: Windows 2019 NSIS Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -1103,7 +1095,7 @@ jobs:
windows-2019-msi-pkg-tests:
name: Windows 2019 MSI Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@@ -1166,7 +1158,7 @@ jobs:
windows-2016:
name: Windows 2016 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1187,7 +1179,7 @@ jobs:
windows-2019:
name: Windows 2019 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1229,13 +1221,14 @@ jobs:
macos-12:
name: macOS 12 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-12') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@@ -1250,13 +1243,14 @@ jobs:
macos-13:
name: macOS 13 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@@ -1269,7 +1263,7 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@@ -1277,7 +1271,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@@ -1292,7 +1287,7 @@ jobs:
almalinux-8:
name: Alma Linux 8 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1313,7 +1308,7 @@ jobs:
almalinux-8-arm64:
name: Alma Linux 8 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1355,7 +1350,7 @@ jobs:
almalinux-9-arm64:
name: Alma Linux 9 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-9-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1376,7 +1371,7 @@ jobs:
amazonlinux-2:
name: Amazon Linux 2 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1397,7 +1392,7 @@ jobs:
amazonlinux-2-arm64:
name: Amazon Linux 2 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1418,7 +1413,7 @@ jobs:
amazonlinux-2023:
name: Amazon Linux 2023 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1481,7 +1476,7 @@ jobs:
centos-7:
name: CentOS 7 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1502,7 +1497,7 @@ jobs:
debian-10:
name: Debian 10 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-10') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1523,7 +1518,7 @@ jobs:
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1544,7 +1539,7 @@ jobs:
debian-11-arm64:
name: Debian 11 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1565,7 +1560,7 @@ jobs:
debian-12:
name: Debian 12 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1586,7 +1581,7 @@ jobs:
debian-12-arm64:
name: Debian 12 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1607,7 +1602,7 @@ jobs:
fedora-39:
name: Fedora 39 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'fedora-39') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1628,7 +1623,7 @@ jobs:
opensuse-15:
name: Opensuse 15 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'opensuse-15') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1649,7 +1644,7 @@ jobs:
photonos-4:
name: Photon OS 4 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1671,7 +1666,7 @@ jobs:
photonos-4-arm64:
name: Photon OS 4 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1693,7 +1688,7 @@ jobs:
photonos-5:
name: Photon OS 5 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1715,7 +1710,7 @@ jobs:
photonos-5-arm64:
name: Photon OS 5 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1737,7 +1732,7 @@ jobs:
ubuntu-2004:
name: Ubuntu 20.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1758,7 +1753,7 @@ jobs:
ubuntu-2004-arm64:
name: Ubuntu 20.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1779,7 +1774,7 @@ jobs:
ubuntu-2204:
name: Ubuntu 22.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }}
needs:
- prepare-workflow
- build-ci-deps
@@ -1831,7 +1826,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@@ -1985,7 +1980,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@@ -2035,7 +2030,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@@ -95,7 +95,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@@ -112,7 +113,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@@ -250,19 +251,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@@ -282,10 +275,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@@ -1045,6 +1034,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@@ -1066,6 +1056,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1077,7 +1068,7 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1086,7 +1077,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1296,6 +1288,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1317,6 +1310,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1329,7 +1323,7 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1337,7 +1331,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1891,7 +1886,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@ -2801,7 +2796,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@ -2909,7 +2904,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -85,7 +85,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -102,7 +103,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -240,19 +241,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -272,10 +265,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@ -1027,6 +1016,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1048,6 +1038,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1059,7 +1050,7 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1068,7 +1059,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1278,6 +1270,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1299,6 +1292,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1311,7 +1305,7 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1319,7 +1313,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1873,7 +1868,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@ -2029,7 +2024,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@ -2079,7 +2074,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -73,7 +73,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -90,7 +91,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -237,19 +238,11 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -269,10 +262,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |
@ -1032,6 +1021,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1053,6 +1043,7 @@ jobs:
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1064,7 +1055,7 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-13-xlarge-pkg-tests:
macos-13-arm64-pkg-tests:
name: macOS 13 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1073,7 +1064,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -1283,6 +1275,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
runner: macos-12
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1304,6 +1297,7 @@ jobs:
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13
runner: macos-13
nox-session: ci-test-onedir
platform: macos
arch: x86_64
@ -1316,7 +1310,7 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
macos-13-xlarge:
macos-13-arm64:
name: macOS 13 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
@ -1324,7 +1318,8 @@ jobs:
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-13-xlarge
distro-slug: macos-13-arm64
runner: macos-13-xlarge
nox-session: ci-test-onedir
platform: macos
arch: arm64
@ -2777,7 +2772,7 @@ jobs:
- windows-2022
- macos-12
- macos-13
- macos-13-xlarge
- macos-13-arm64
- almalinux-8
- almalinux-8-arm64
- almalinux-9
@ -2827,7 +2822,7 @@ jobs:
- ubuntu-2204-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-xlarge-pkg-tests
- macos-13-arm64-pkg-tests
- windows-2016-nsis-pkg-tests
- windows-2016-msi-pkg-tests
- windows-2019-nsis-pkg-tests

View file

@ -163,7 +163,7 @@ jobs:
macos-dependencies:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
timeout-minutes: 90
strategy:
fail-fast: false

View file

@ -17,7 +17,7 @@
relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>"
source: "<{ backend }>"
<%- if gh_environment %>
<%- if gh_environment != "ci" %>
environment: <{ gh_environment }>
sign-macos-packages: true
sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>

View file

@ -1,3 +1,5 @@
<%- set gh_environment = gh_environment|default("ci") %>
<%- extends 'layout.yml.jinja' %>
<%- set pre_commit_version = "3.0.4" %>

View file

@ -91,7 +91,8 @@ jobs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
pull-labels: ${{ steps.get-pull-labels.outputs.labels }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
@ -108,7 +109,7 @@ jobs:
- name: Get Changed Files
if: ${{ github.event_name == 'pull_request'}}
id: changed-files
uses: dorny/paths-filter@v2
uses: dorny/paths-filter@v3
with:
token: ${{ github.token }}
list-files: json
@ -260,21 +261,13 @@ jobs:
run: |
tools ci runner-types ${{ github.event_name }}
- name: Check Defined Runners
run: |
echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.'
- name: Define Jobs
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
}> ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
@ -294,10 +287,6 @@ jobs:
run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run: |
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run: |

View file

@ -69,10 +69,10 @@ jobs:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["linux"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- for os in test_salt_pkg_downloads_listing["linux"] %>
- distro-slug: <{ os.slug }>
arch: <{ os.arch }>
pkg-type: <{ os.pkg_type }>
<%- endfor %>
steps:
@ -271,7 +271,7 @@ jobs:
macos:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
env:
USE_S3_CACHE: 'false'
environment: ${{ inputs.environment }}
@ -280,10 +280,10 @@ jobs:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["macos"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- for os in test_salt_pkg_downloads_listing["macos"] %>
- distro-slug: <{ os.slug }>
arch: <{ os.arch }>
pkg-type: <{ os.pkg_type }>
<%- endfor %>
steps:
@ -485,10 +485,10 @@ jobs:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["windows"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- for os in test_salt_pkg_downloads_listing["windows"] %>
- distro-slug: <{ os.slug }>
arch: <{ os.arch }>
pkg-type: <{ os.pkg_type }>
<%- endfor %>
steps:

View file

@ -1,28 +1,32 @@
<%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<%- for os in test_salt_pkg_listing["linux"] %>
<%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> Package Test
name: <{ os.display_name }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ arch }>
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ pkg_type }>
pkg-type: <{ os.pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- if fips == "fips" %>
<%- if os.fips %>
fips: true
<%- endif %>
@ -30,23 +34,28 @@
<%- for slug, display_name, arch in test_salt_pkg_listing["macos"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<%- for os in test_salt_pkg_listing["macos"] %>
<%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> Package Test
name: <{ os.display_name }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
runner: <{ os.runner }>
nox-session: ci-test-onedir
platform: macos
arch: <{ arch }>
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: macos
nox-version: <{ nox_version }>
@ -58,31 +67,33 @@
<%- endfor %>
<%- for slug, display_name, arch in test_salt_pkg_listing["windows"] %>
<%- for pkg_type in ("NSIS", "MSI") %>
<%- set job_name = "{}-{}-pkg-tests".format(slug.replace(".", ""), pkg_type.lower()) %>
<%- for os in test_salt_pkg_listing["windows"] %>
<%- set job_name = "{}-{}-pkg-tests".format(os.slug.replace(".", ""), os.pkg_type.lower()) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> <{ pkg_type }> Package Test
name: <{ os.display_name }> <{ os.pkg_type }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-windows.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: windows
arch: <{ arch }>
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ pkg_type }>
pkg-type: <{ os.pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- endfor %>
<%- endfor %>

View file

@ -6,18 +6,22 @@
<%- set partial_testrun_timeout_value = 360 %>
<%- set windows_full_testrun_timeout_value = full_testrun_timeout_value + 30 %>
<%- for slug, display_name, arch in test_salt_listing["windows"] %>
<%- for os in test_salt_listing["windows"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }> Test
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-windows.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: windows
arch: amd64
@ -33,21 +37,26 @@
<%- endfor %>
<%- for slug, display_name, arch in test_salt_listing["macos"] %>
<%- for os in test_salt_listing["macos"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }> Test
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
runner: <{ os.runner }>
nox-session: ci-test-onedir
platform: macos
arch: <{ arch }>
arch: <{ os.arch }>
nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
@ -59,21 +68,25 @@
<%- endfor %>
<%- for slug, display_name, arch, fips in test_salt_listing["linux"] %>
<%- for os in test_salt_listing["linux"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }> Test
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: <{ slug }>
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ arch }>
arch: <{ os.arch }>
nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
@ -82,7 +95,7 @@
skip-code-coverage: <{ skip_test_coverage_check }>
workflow-slug: <{ workflow_slug }>
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
<%- if fips == "fips" %>
<%- if os.fips %>
fips: true
<%- endif %>

View file

@ -8,6 +8,10 @@ on:
required: true
type: string
description: The OS slug to run tests against
runner:
required: true
type: string
description: The GitHub runner name
nox-session:
required: true
type: string
@ -97,7 +101,7 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
runs-on: ${{ inputs.runner }}
timeout-minutes: ${{ inputs.timeout-minutes }}
needs:
- generate-matrix

View file

@ -380,7 +380,7 @@ jobs:
macos:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
env:
USE_S3_CACHE: 'false'
environment: ${{ inputs.environment }}
@ -395,10 +395,10 @@ jobs:
- distro-slug: macos-13
arch: x86_64
pkg-type: package
- distro-slug: macos-13-xlarge
- distro-slug: macos-13-arm64
arch: arm64
pkg-type: package
- distro-slug: macos-13-xlarge
- distro-slug: macos-13-arm64
arch: arm64
pkg-type: onedir

View file

@ -7,6 +7,10 @@ on:
required: true
type: string
description: The OS slug to run tests against
runner:
required: true
type: string
description: The GitHub runner name
platform:
required: true
type: string
@ -98,7 +102,7 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
runs-on: ${{ inputs.runner }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- generate-matrix

View file

@ -59,7 +59,7 @@ repos:
- id: tools
alias: generate-workflows
name: Generate GitHub Workflow Templates
files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/templates/.*)$
files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/.*)$
pass_filenames: false
args:
- pre-commit

2
changelog/53363.fixed.md Normal file
View file

@ -0,0 +1,2 @@
``user.add`` on Windows now allows you to add user names that contain all
numeric characters

1
changelog/65200.fixed.md Normal file
View file

@ -0,0 +1 @@
Prevent full system upgrade on single package install for Arch Linux

1
changelog/66143.fixed.md Normal file
View file

@ -0,0 +1 @@
Fix systemctl with "try-restart" instead of "retry-restart" within the RPM spec, properly restarting upgraded services

1
changelog/66280.fixed.md Normal file
View file

@ -0,0 +1 @@
Add leading slash to salt helper file paths as per dh_links requirement

View file

@ -4,3 +4,10 @@ relenv_version: "0.15.1"
release_branches:
- "3006.x"
- "3007.x"
mandatory_os_slugs:
- almalinux-9
- amazonlinux-2023-arm64
- archlinux-lts
- macos-13-arm64
- ubuntu-22.04-arm64
- windows-2022

View file

@ -1,6 +1,6 @@
opt/saltstack/salt/salt-master /usr/bin/salt-master
opt/saltstack/salt/salt usr/bin/salt
opt/saltstack/salt/salt-cp usr/bin/salt-cp
opt/saltstack/salt/salt-key usr/bin/salt-key
opt/saltstack/salt/salt-run usr/bin/salt-run
opt/saltstack/salt/spm usr/bin/spm
opt/saltstack/salt/salt /usr/bin/salt
opt/saltstack/salt/salt-cp /usr/bin/salt-cp
opt/saltstack/salt/salt-key /usr/bin/salt-key
opt/saltstack/salt/salt-run /usr/bin/salt-run
opt/saltstack/salt/spm /usr/bin/spm

View file

@ -439,16 +439,16 @@ find /etc/salt /opt/saltstack/salt /var/log/salt /var/cache/salt /var/run/salt \
# %%systemd_preun salt-syndic.service > /dev/null 2>&1
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || :
systemctl stop salt-syndic.service > /dev/null 2>&1 || :
/bin/systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || :
fi
%preun minion
# %%systemd_preun salt-minion.service
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || :
systemctl stop salt-minion.service > /dev/null 2>&1 || :
/bin/systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-minion.service > /dev/null 2>&1 || :
fi
@ -456,8 +456,8 @@ fi
# %%systemd_preun salt-api.service
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
systemctl --no-reload disable salt-api.service > /dev/null 2>&1 || :
systemctl stop salt-api.service > /dev/null 2>&1 || :
/bin/systemctl --no-reload disable salt-api.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-api.service > /dev/null 2>&1 || :
fi
@ -472,14 +472,6 @@ ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud
%post master
# %%systemd_post salt-master.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-master.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-master.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt
ln -s -f /opt/saltstack/salt/salt-cp %{_bindir}/salt-cp
ln -s -f /opt/saltstack/salt/salt-key %{_bindir}/salt-key
@ -498,27 +490,27 @@ if [ $1 -lt 2 ]; then
fi
fi
fi
# %%systemd_post salt-master.service
if [ $1 -gt 1 ] ; then
# Upgrade
/bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || :
else
# Initial installation
/bin/systemctl preset salt-master.service >/dev/null 2>&1 || :
fi
%post syndic
ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic
# %%systemd_post salt-syndic.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-syndic.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-syndic.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-syndic.service >/dev/null 2>&1 || :
/bin/systemctl preset salt-syndic.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic
%post minion
# %%systemd_post salt-minion.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-minion.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-minion.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt-minion %{_bindir}/salt-minion
ln -s -f /opt/saltstack/salt/salt-call %{_bindir}/salt-call
ln -s -f /opt/saltstack/salt/salt-proxy %{_bindir}/salt-proxy
@ -535,20 +527,28 @@ if [ $1 -lt 2 ]; then
fi
fi
fi
# %%systemd_post salt-minion.service
if [ $1 -gt 1 ] ; then
# Upgrade
/bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || :
else
# Initial installation
/bin/systemctl preset salt-minion.service >/dev/null 2>&1 || :
fi
%post ssh
ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh
%post api
ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api
# %%systemd_post salt-api.service
if [ $1 -gt 1 ] ; then
# Upgrade
systemctl retry-restart salt-api.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-api.service >/dev/null 2>&1 || :
else
# Initial installation
systemctl preset salt-api.service >/dev/null 2>&1 || :
/bin/systemctl preset salt-api.service >/dev/null 2>&1 || :
fi
ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api
%posttrans cloud
@ -589,10 +589,10 @@ fi
%postun master
# %%systemd_postun_with_restart salt-master.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-master.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || :
fi
if [ $1 -eq 0 ]; then
if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then
@ -610,18 +610,18 @@ fi
%postun syndic
# %%systemd_postun_with_restart salt-syndic.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-syndic.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-syndic.service >/dev/null 2>&1 || :
fi
%postun minion
# %%systemd_postun_with_restart salt-minion.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-minion.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || :
fi
if [ $1 -eq 0 ]; then
if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then
@ -639,10 +639,10 @@ fi
%postun api
# %%systemd_postun_with_restart salt-api.service
systemctl daemon-reload >/dev/null 2>&1 || :
/bin/systemctl daemon-reload >/dev/null 2>&1 || :
if [ $1 -ge 1 ] ; then
# Package upgrade, not uninstall
systemctl try-restart salt-api.service >/dev/null 2>&1 || :
/bin/systemctl try-restart salt-api.service >/dev/null 2>&1 || :
fi
%changelog

View file

@ -103,7 +103,6 @@ if ( $install_build_tools ) {
"--add Microsoft.VisualStudio.Component.Windows81SDK", `
"--add Microsoft.VisualStudio.Component.Windows10SDK.17763", `
"--add Microsoft.VisualStudio.Component.VC.140", `
"--add Microsoft.Component.VC.Runtime.UCRTSDK", `
"--lang en-US", `
"--includeRecommended", `
"--quiet", `

View file

@ -246,15 +246,6 @@ IMCAC - Immediate Custom Action - It's immediate
<CustomAction Id="DeleteConfig2_CADH" Property="DeleteConfig2_DECAC" Value="CLEAN_INSTALL=[CLEAN_INSTALL];REMOVE_CONFIG=[REMOVE_CONFIG];INSTALLDIR=[INSTALLDIR];ROOTDIR=[ROOTDIR];" />
<CustomAction Id="MoveInsecureConfig_CADH" Property="MoveInsecureConfig_DECAC" Value="INSECURE_CONFIG_FOUND=[INSECURE_CONFIG_FOUND];" />
<!-- Install VC++ runtime -->
<DirectoryRef Id="TARGETDIR">
<!-- Visual C++ runtimes depend on the target platform -->
<?if $(var.WIN64)=yes ?>
<Merge Id="MSM_VC143_CRT" SourceFile="$(var.WEBCACHE_DIR)\Microsoft_VC143_CRT_x64.msm" DiskId="1" Language="0" />
<?else ?>
<Merge Id="MSM_VC143_CRT" SourceFile="$(var.WEBCACHE_DIR)\Microsoft_VC143_CRT_x86.msm" DiskId="1" Language="0" />
<?endif ?>
</DirectoryRef>
<!-- Add INSTALLDIR to the system Path -->
<DirectoryRef Id="TARGETDIR">
<Component Id="INSTALLDIR_System_Path" Guid="A9F54641-91F8-4AFB-B812-9409E6EA0192">
@ -267,7 +258,6 @@ IMCAC - Immediate Custom Action - It's immediate
<!-- Leaving registry keys would mean the product is still installed -->
<Feature Id="ProductFeature" Title="Minion" Level="1">
<ComponentGroupRef Id="ProductComponents" />
<Feature Id="VC143" Title="VC++ 2022" AllowAdvertise="no" Display="hidden"><MergeRef Id="MSM_VC143_CRT" /></Feature>
</Feature>
<!-- Get the config file template from the msi store only if no config is present -->

View file

@ -10,8 +10,6 @@ You need
- .Net 3.5 SDK (for WiX)<sup>*</sup>
- [Wix 3](http://wixtoolset.org/releases/)<sup>**</sup>
- [Build tools 2015](https://www.microsoft.com/en-US/download/confirmation.aspx?id=48159)<sup>**</sup>
- Microsoft_VC143_CRT_x64.msm from Visual Studio 2015<sup>**</sup>
- Microsoft_VC143_CRT_x86.msm from Visual Studio 2015<sup>**</sup>
Notes:
- <sup>*</sup> `build.cmd` will open `optionalfeatures` if necessary.

View file

@ -76,8 +76,6 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) {
# Script Variables
#-------------------------------------------------------------------------------
$WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir"
$DEPS_URL = "https://repo.saltproject.io/windows/dependencies"
$PROJECT_DIR = $(git rev-parse --show-toplevel)
$BUILD_DIR = "$PROJECT_DIR\pkg\windows\build"
$BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv"
@ -124,21 +122,6 @@ Write-Host "- Architecture: $BUILD_ARCH"
Write-Host "- Salt Version: $Version"
Write-Host $("-" * 80)
#-------------------------------------------------------------------------------
# Ensure cache dir exists
#-------------------------------------------------------------------------------
if ( ! (Test-Path -Path $WEBCACHE_DIR) ) {
Write-Host "Creating cache directory: " -NoNewline
New-Item -ItemType directory -Path $WEBCACHE_DIR | Out-Null
if ( Test-Path -Path $WEBCACHE_DIR ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Red
exit 1
}
}
#-------------------------------------------------------------------------------
# Ensure WIX environment variable is set, if not refresh and check again
#-------------------------------------------------------------------------------
@ -159,19 +142,6 @@ if ( ! "$env:WIX" ) {
}
}
#-------------------------------------------------------------------------------
# Caching VC++ Runtimes
#-------------------------------------------------------------------------------
$RUNTIMES = @(
("Microsoft_VC143_CRT_x64.msm", "64", "F209B8906063A79B0DFFBB55D3C20AC0A676252DD4F5377CFCD148C409C859EC"),
("Microsoft_VC143_CRT_x86.msm", "32", "B187BD73C7DC0BA353C5D3A6D9D4E63EF72435F8E68273466F30E5496C1A86F7")
)
$RUNTIMES | ForEach-Object {
$name, $arch, $hash = $_
VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$arch/$name" "$hash"
}
#-------------------------------------------------------------------------------
# Converting to MSI Version
#-------------------------------------------------------------------------------
@ -608,7 +578,6 @@ Push-Location $SCRIPT_DIR
-dDisplayVersion="$Version" `
-dInternalVersion="$INTERNAL_VERSION" `
-dDISCOVER_INSTALLDIR="$($DISCOVER_INSTALLDIR[$i])" `
-dWEBCACHE_DIR="$WEBCACHE_DIR" `
-dDISCOVER_CONFDIR="$DISCOVER_CONFDIR" `
-ext "$($ENV:WIX)bin\WixUtilExtension.dll" `
-ext "$($ENV:WIX)bin\WixUIExtension.dll" `

View file

@ -524,171 +524,6 @@ InstallDirRegKey HKLM "${PRODUCT_DIR_REGKEY}" ""
ShowInstDetails show
ShowUnInstDetails show
# Hidden section (leading "-"): copies prerequisite installers into the
# temporary NSIS plugins directory so later sections can run them.
Section -copy_prereqs
    # Copy prereqs to the Plugins Directory
    # These files are downloaded by build_pkg.bat
    # This directory gets removed upon completion
    SetOutPath "$PLUGINSDIR\"
    File /r "..\..\prereqs\"
SectionEnd
# Check if the Windows 10 Universal C Runtime (KB2999226) is installed. Python
# 3 needs the updated ucrt on Windows 8.1/2012R2 and lower. They are installed
# via KB2999226, but we're not going to patch the system here. Instead, we're
# going to copy the .dll files to the \salt\bin directory
Section -install_ucrt

    Var /GLOBAL UcrtFileName

    # Get the Major.Minor version Number
    # Windows 10 introduced CurrentMajorVersionNumber
    ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows NT\CurrentVersion" \
        CurrentMajorVersionNumber

    # Windows 10/2016 will return a value here, skip to the end if returned
    StrCmp $R0 '' lbl_needs_ucrt 0

    # Found Windows 10
    detailPrint "KB2999226 does not apply to this machine"
    goto lbl_done

    lbl_needs_ucrt:
    # UCRT only needed on Windows Server 2012R2/Windows 8.1 and below. The
    # first ReadRegStr command above should have skipped to lbl_done if on
    # Windows 10 box

    # Is the update already installed
    ClearErrors

    # Use WMI to check if it's installed
    detailPrint "Checking for existing UCRT (KB2999226) installation"
    nsExec::ExecToStack 'cmd /q /c wmic qfe get hotfixid | findstr "^KB2999226"'
    # Clean up the stack
    Pop $R0 # Gets the ErrorCode
    Pop $R1 # Gets the stdout, which should be KB2999226 if it's installed

    # If it returned KB2999226 it's already installed
    StrCmp $R1 'KB2999226' lbl_done

    detailPrint "UCRT (KB2999226) not found"

    # Use RunningX64 here to get the Architecture for the system running the
    # installer.
    ${If} ${RunningX64}
        StrCpy $UcrtFileName "ucrt_x64.zip"
    ${Else}
        StrCpy $UcrtFileName "ucrt_x86.zip"
    ${EndIf}

    ClearErrors

    detailPrint "Unzipping UCRT dll files to $INSTDIR\Scripts"
    CreateDirectory $INSTDIR\Scripts
    nsisunz::UnzipToLog "$PLUGINSDIR\$UcrtFileName" "$INSTDIR\Scripts"

    # Clean up the stack
    Pop $R0  # Get Error

    # nsisunz pushes the literal string "success" on success; anything else
    # is an error message.
    ${IfNot} $R0 == "success"
        detailPrint "error: $R0"
        Sleep 3000
    ${Else}
        detailPrint "UCRT dll files copied successfully"
    ${EndIf}

    lbl_done:

SectionEnd
# Check and install Visual C++ redist 2022 packages
# Hidden section (-) to install VCRedist
Section -install_vcredist_2022

    Var /GLOBAL VcRedistName
    Var /GLOBAL VcRedistReg

    # Only install 64bit VCRedist on 64bit machines
    # Use RunningX64 here to get the Architecture for the system running the
    # installer.
    ${If} ${RunningX64}
        StrCpy $VcRedistName "vcredist_x64_2022"
        StrCpy $VcRedistReg "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64"
    ${Else}
        StrCpy $VcRedistName "vcredist_x86_2022"
        StrCpy $VcRedistReg "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x86"
    ${EndIf}

    # Detecting VCRedist Installation
    detailPrint "Checking for $VcRedistName..."
    ReadRegDword $0 HKLM $VcRedistReg "Installed"
    # "Installed" == 1 means the runtime is present: skip the next instruction
    # (+2) and fall through to SectionEnd; otherwise call the installer.
    StrCmp $0 "1" +2 0
    Call InstallVCRedist

SectionEnd
# Prompt for and run the bundled VC++ 2022 redistributable installer.
# Aborts the whole installation if the user declines or the redist install
# fails; /SD flags make silent installs default to "Yes"/"OK".
# Fix: the second failure MessageBox said "mnually" instead of "manually".
Function InstallVCRedist

    detailPrint "System requires $VcRedistName"
    MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 \
        "$VcRedistName is currently not installed. Would you like to \
        install?" \
        /SD IDYES IDYES InstallVcRedist

    # User chose "No": log, inform, and quit the installer
    detailPrint "$VcRedistName not installed"
    detailPrint ">>>Installation aborted by user<<<"
    MessageBox MB_ICONEXCLAMATION \
        "$VcRedistName not installed. Aborted by user.$\n$\n\
        Installer will now close." \
        /SD IDOK
    Quit

    InstallVcRedist:

        # If an output variable is specified ($0 in the case below), ExecWait
        # sets the variable with the exit code (and only sets the error flag if
        # an error occurs; if an error occurs, the contents of the user
        # variable are undefined).
        # http://nsis.sourceforge.net/Reference/ExecWait
        ClearErrors
        detailPrint "Installing $VcRedistName..."
        ExecWait '"$PLUGINSDIR\$VcRedistName.exe" /install /quiet /norestart' $0
        IfErrors 0 CheckVcRedistErrorCode

        # ExecWait itself failed (could not launch the redist installer)
        detailPrint "An error occurred during installation of $VcRedistName"
        MessageBox MB_OK|MB_ICONEXCLAMATION \
            "$VcRedistName failed to install. Try installing the package \
            manually.$\n$\n\
            The installer will now close." \
            /SD IDOK
        Quit

        CheckVcRedistErrorCode:

        # Check for Reboot Error Code (3010)
        ${If} $0 == 3010
            detailPrint "$VcRedistName installed but requires a restart to complete."
            detailPrint "Reboot and run Salt install again"
            MessageBox MB_OK|MB_ICONINFORMATION \
                "$VcRedistName installed but requires a restart to complete." \
                /SD IDOK

        # Check for any other errors
        ${ElseIfNot} $0 == 0
            detailPrint "An error occurred during installation of $VcRedistName"
            detailPrint "Error: $0"
            MessageBox MB_OK|MB_ICONEXCLAMATION \
                "$VcRedistName failed to install. Try installing the package \
                manually.$\n\
                ErrorCode: $0$\n\
                The installer will now close." \
                /SD IDOK
        ${EndIf}

FunctionEnd
Section "MainSection" SEC01
${If} $MoveExistingConfig == 1
@ -763,7 +598,6 @@ Function .onInit
${EndIf}
${EndIf}
InitPluginsDir
Call parseInstallerCommandLineSwitches
# Uninstall msi-installed salt

View file

@ -35,7 +35,6 @@ $SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").Directo
$WINDOWS_DIR = "$PROJECT_DIR\pkg\windows"
$NSIS_DIR = "$WINDOWS_DIR\nsis"
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
$PREREQS_DIR = "$WINDOWS_DIR\prereqs"
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
#-------------------------------------------------------------------------------
@ -50,8 +49,7 @@ Write-Host $("-" * 80)
# Setup Directories
#-------------------------------------------------------------------------------
$directories = "$PREREQS_DIR",
"$BUILDENV_DIR",
$directories = "$BUILDENV_DIR",
"$BUILDENV_DIR\configs"
$directories | ForEach-Object {
if ( ! (Test-Path -Path "$_") ) {
@ -70,19 +68,6 @@ $directories | ForEach-Object {
# Create binaries
#-------------------------------------------------------------------------------
$prereq_files = "vcredist_x86_2022.exe",
"vcredist_x64_2022.exe",
$prereq_files | ForEach-Object {
Write-Host "Creating $_`: " -NoNewline
Set-Content -Path "$PREREQS_DIR\$_" -Value "binary"
if ( Test-Path -Path "$PREREQS_DIR\$_" ) {
Write-Result "Success"
} else {
Write-Result "Failed" -ForegroundColor Red
exit 1
}
}
$binary_files = "ssm.exe",
"python.exe"
$binary_files | ForEach-Object {

View file

@ -62,7 +62,6 @@ if ( $BuildDir ) {
} else {
$BUILD_DIR = "$SCRIPT_DIR\buildenv"
}
$PREREQ_DIR = "$SCRIPT_DIR\prereqs"
$SCRIPTS_DIR = "$BUILD_DIR\Scripts"
$BUILD_CONF_DIR = "$BUILD_DIR\configs"
$SITE_PKGS_DIR = "$BUILD_DIR\Lib\site-packages"
@ -126,17 +125,6 @@ if ( Test-Path -Path $BUILD_CONF_DIR) {
}
}
if ( Test-Path -Path $PREREQ_DIR ) {
Write-Host "Removing PreReq Directory: " -NoNewline
Remove-Item -Path $PREREQ_DIR -Recurse -Force
if ( ! (Test-Path -Path $PREREQ_DIR) ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Red
exit 1
}
}
#-------------------------------------------------------------------------------
# Staging the Build Environment
#-------------------------------------------------------------------------------
@ -183,18 +171,6 @@ $scripts | ForEach-Object {
}
}
# Copy VCRedist 2022 to the prereqs directory
New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null
Write-Host "Copying VCRedist 2022 $ARCH_X to prereqs: " -NoNewline
$file = "vcredist_$ARCH_X`_2022.exe"
Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file"
if ( Test-Path -Path "$PREREQ_DIR\$file" ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Red
exit 1
}
#-------------------------------------------------------------------------------
# Remove binaries not needed by Salt
#-------------------------------------------------------------------------------

View file

@ -551,7 +551,7 @@ def install(
cmd.append("-S")
if refresh is True:
cmd.append("-y")
if sysupgrade is True or (sysupgrade is None and refresh is True):
if sysupgrade is True:
cmd.append("-u")
cmd.extend(["--noprogressbar", "--noconfirm", "--needed"])
wildcards = []

View file

@ -329,7 +329,7 @@ class daclConstants:
return path
def _getUserSid(user):
def _get_user_sid(user):
"""
return a state error dictionary, with 'sid' as a field if it could be returned
if user is None, sid will also be None
@ -413,7 +413,7 @@ def get(path, objectType, user=None):
"""
ret = {"Path": path, "ACLs": []}
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if path and objectType:
dc = daclConstants()
@ -458,7 +458,7 @@ def add_ace(path, objectType, user, permission, acetype, propagation):
acetype = acetype.strip().upper()
propagation = propagation.strip().upper()
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if not sidRet["result"]:
return sidRet
permissionbit = dc.getPermissionBit(objectTypeBit, permission)
@ -555,7 +555,7 @@ def rm_ace(path, objectType, user, permission=None, acetype=None, propagation=No
if check_ace(path, objectType, user, permission, acetype, propagation, True)[
"Exists"
]:
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if not sidRet["result"]:
return sidRet
permissionbit = (
@ -804,7 +804,7 @@ def check_inheritance(path, objectType, user=None):
ret = {"result": False, "Inheritance": False, "comment": ""}
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
dc = daclConstants()
objectType = dc.getObjectTypeBit(objectType)
@ -880,7 +880,7 @@ def check_ace(
dc.getPropagationBit(objectTypeBit, propagation) if propagation else None
)
sidRet = _getUserSid(user)
sidRet = _get_user_sid(user)
if not sidRet["result"]:
return sidRet

View file

@ -22,6 +22,7 @@ Module for managing Windows Users.
This currently only works with local user accounts, not domain accounts
"""
import ctypes
import logging
import shlex
import time
@ -30,6 +31,8 @@ from datetime import datetime
import salt.utils.args
import salt.utils.dateutils
import salt.utils.platform
import salt.utils.versions
import salt.utils.win_reg
import salt.utils.winapi
from salt.exceptions import CommandExecutionError
@ -82,7 +85,7 @@ def add(
Add a user to the minion.
Args:
name (str): User name
name (str): The username for the new account
password (str, optional): User's password in plain text.
@ -106,7 +109,7 @@ def add(
logs on.
Returns:
bool: True if successful. False is unsuccessful.
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -116,10 +119,13 @@ def add(
"""
user_info = {}
if name:
user_info["name"] = name
user_info["name"] = str(name)
else:
return False
user_info["password"] = password
if password:
user_info["password"] = str(password)
else:
user_info["password"] = None
user_info["priv"] = win32netcon.USER_PRIV_USER
user_info["home_dir"] = home
user_info["comment"] = description
@ -160,13 +166,13 @@ def update(
):
# pylint: disable=anomalous-backslash-in-string
"""
Updates settings for the windows user. Name is the only required parameter.
Updates settings for the Windows user. Name is the only required parameter.
Settings will only be changed if the parameter is passed a value.
.. versionadded:: 2015.8.0
Args:
name (str): The user name to update.
name (str): The username to update.
password (str, optional): New user password in plain text.
@ -206,7 +212,7 @@ def update(
changing the password. False allows the user to change the password.
Returns:
bool: True if successful. False is unsuccessful.
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -219,7 +225,7 @@ def update(
# Make sure the user exists
# Return an object containing current settings for the user
try:
user_info = win32net.NetUserGetInfo(None, name, 4)
user_info = win32net.NetUserGetInfo(None, str(name), 4)
except win32net.error as exc:
log.error("Failed to update user %s", name)
log.error("nbr: %s", exc.winerror)
@ -230,7 +236,9 @@ def update(
# Check parameters to update
# Update the user object with new settings
if password:
user_info["password"] = password
user_info["password"] = str(password)
else:
user_info["password"] = None
if home:
user_info["home_dir"] = home
if homedrive:
@ -251,7 +259,7 @@ def update(
dt_obj = salt.utils.dateutils.date_cast(expiration_date)
except (ValueError, RuntimeError):
return f"Invalid Date/Time Format: {expiration_date}"
user_info["acct_expires"] = time.mktime(dt_obj.timetuple())
user_info["acct_expires"] = int(dt_obj.timestamp())
if expired is not None:
if expired:
user_info["password_expired"] = 1
@ -263,6 +271,7 @@ def update(
else:
user_info["flags"] &= ~win32netcon.UF_ACCOUNTDISABLE
if unlock_account is not None:
# We can only unlock with this flag... we can't unlock
if unlock_account:
user_info["flags"] &= ~win32netcon.UF_LOCKOUT
if password_never_expires is not None:
@ -278,7 +287,7 @@ def update(
# Apply new settings
try:
win32net.NetUserSetInfo(None, name, 4, user_info)
win32net.NetUserSetInfo(None, str(name), 4, user_info)
except win32net.error as exc:
log.error("Failed to update user %s", name)
log.error("nbr: %s", exc.winerror)
@ -305,7 +314,7 @@ def delete(name, purge=False, force=False):
user out and delete user.
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -315,7 +324,7 @@ def delete(name, purge=False, force=False):
"""
# Check if the user exists
try:
user_info = win32net.NetUserGetInfo(None, name, 4)
user_info = win32net.NetUserGetInfo(None, str(name), 4)
except win32net.error as exc:
log.error("User not found: %s", name)
log.error("nbr: %s", exc.winerror)
@ -367,7 +376,7 @@ def delete(name, purge=False, force=False):
# Remove the User Profile directory
if purge:
try:
sid = getUserSid(name)
sid = get_user_sid(name)
win32profile.DeleteProfile(sid)
except pywintypes.error as exc:
(number, context, message) = exc.args
@ -382,7 +391,7 @@ def delete(name, purge=False, force=False):
# And finally remove the user account
try:
win32net.NetUserDel(None, name)
win32net.NetUserDel(None, str(name))
except win32net.error as exc:
log.error("Failed to delete user %s", name)
log.error("nbr: %s", exc.winerror)
@ -394,11 +403,28 @@ def delete(name, purge=False, force=False):
def getUserSid(username):
"""
Deprecated function. Please use get_user_sid instead
CLI Example:
.. code-block:: bash
salt '*' user.get_user_sid jsnuffy
"""
salt.utils.versions.warn_until(
version=3009,
message="'getUserSid' is being deprecated. Please use get_user_sid instead",
)
return get_user_sid(username)
def get_user_sid(username):
"""
Get the Security ID for the user
Args:
username (str): The user name for which to look up the SID
username (str): The username for which to look up the SID
Returns:
str: The user SID
@ -407,7 +433,7 @@ def getUserSid(username):
.. code-block:: bash
salt '*' user.getUserSid jsnuffy
salt '*' user.get_user_sid jsnuffy
"""
domain = win32api.GetComputerName()
if username.find("\\") != -1:
@ -424,12 +450,12 @@ def setpassword(name, password):
Set the user's password
Args:
name (str): The user name for which to set the password
name (str): The username for which to set the password
password (str): The new password
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -445,12 +471,12 @@ def addgroup(name, group):
Add user to a group
Args:
name (str): The user name to add to the group
name (str): The username to add to the group
group (str): The name of the group to which to add the user
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -458,7 +484,7 @@ def addgroup(name, group):
salt '*' user.addgroup jsnuffy 'Power Users'
"""
name = shlex.quote(name)
name = shlex.quote(str(name))
group = shlex.quote(group).lstrip("'").rstrip("'")
user = info(name)
@ -478,12 +504,12 @@ def removegroup(name, group):
Remove user from a group
Args:
name (str): The user name to remove from the group
name (str): The username to remove from the group
group (str): The name of the group from which to remove the user
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -491,7 +517,7 @@ def removegroup(name, group):
salt '*' user.removegroup jsnuffy 'Power Users'
"""
name = shlex.quote(name)
name = shlex.quote(str(name))
group = shlex.quote(group).lstrip("'").rstrip("'")
user = info(name)
@ -519,7 +545,7 @@ def chhome(name, home, **kwargs):
home (str): The new location of the home directory
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -562,7 +588,7 @@ def chprofile(name, profile):
profile (str): The new location of the profile
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -578,12 +604,12 @@ def chfullname(name, fullname):
Change the full name of the user
Args:
name (str): The user name for which to change the full name
name (str): The username for which to change the full name
fullname (str): The new value for the full name
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -600,7 +626,7 @@ def chgroups(name, groups, append=True):
member of only the specified groups
Args:
name (str): The user name for which to change groups
name (str): The username for which to change groups
groups (str, list): A single group or a list of groups to assign to the
user. For multiple groups this can be a comma delimited string or a
@ -611,7 +637,7 @@ def chgroups(name, groups, append=True):
only. Default is True.
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:
@ -623,21 +649,31 @@ def chgroups(name, groups, append=True):
groups = groups.split(",")
groups = [x.strip(" *") for x in groups]
ugrps = set(list_groups(name))
if ugrps == set(groups):
return True
current_groups = set(list_groups(name))
expected_groups = set()
name = shlex.quote(name)
name = shlex.quote(str(name))
if not append:
for group in ugrps:
# We don't want to append to the list, remove groups not in the new set
# of groups
for group in current_groups:
group = shlex.quote(group).lstrip("'").rstrip("'")
if group not in groups:
cmd = f'net localgroup "{group}" {name} /delete'
__salt__["cmd.run_all"](cmd, python_shell=True)
else:
expected_groups.add(group)
else:
# We're appending to the current list of groups. If they already match
# then bail
if current_groups == set(groups):
return True
else:
expected_groups = current_groups.union(set(groups))
for group in groups:
if group in ugrps:
if group in current_groups:
continue
group = shlex.quote(group).lstrip("'").rstrip("'")
cmd = f'net localgroup "{group}" {name} /add'
@ -646,8 +682,9 @@ def chgroups(name, groups, append=True):
log.error(out["stdout"])
return False
agrps = set(list_groups(name))
return len(ugrps - agrps) == 0
new_groups = set(list_groups(name))
return len(expected_groups - new_groups) == 0
def info(name):
@ -677,6 +714,7 @@ def info(name):
- last_logon
- account_disabled
- account_locked
- expiration_date
- password_never_expires
- disallow_change_password
- gid
@ -690,14 +728,14 @@ def info(name):
ret = {}
items = {}
try:
items = win32net.NetUserGetInfo(None, name, 4)
items = win32net.NetUserGetInfo(None, str(name), 4)
except win32net.error:
pass
if items:
groups = []
try:
groups = win32net.NetUserGetLocalGroups(None, name)
groups = win32net.NetUserGetLocalGroups(None, str(name))
except win32net.error:
pass
@ -722,9 +760,15 @@ def info(name):
ret["last_logon"] = datetime.fromtimestamp(items["last_logon"]).strftime(
"%Y-%m-%d %H:%M:%S"
)
ret["expiration_date"] = datetime.fromtimestamp(items["acct_expires"]).strftime(
"%Y-%m-%d %H:%M:%S"
)
# If the value is -1 or 0xFFFFFFFF, it is set to never expire
if items["acct_expires"] == ctypes.c_ulong(win32netcon.TIMEQ_FOREVER).value:
ret["expiration_date"] = "Never"
else:
ret["expiration_date"] = datetime.fromtimestamp(
items["acct_expires"]
).strftime("%Y-%m-%d %H:%M:%S")
ret["expired"] = items["password_expired"] == 1
if not ret["profile"]:
ret["profile"] = _get_userprofile_from_registry(name, ret["uid"])
@ -765,17 +809,17 @@ def _get_userprofile_from_registry(user, sid):
registry
Args:
user (str): The user name, used in debug message
user (str): The username, used in debug message
sid (str): The sid to lookup in the registry
Returns:
str: Profile directory
"""
profile_dir = __utils__["reg.read_value"](
"HKEY_LOCAL_MACHINE",
f"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{sid}",
"ProfileImagePath",
profile_dir = salt.utils.win_reg.read_value(
hive="HKEY_LOCAL_MACHINE",
key=f"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{sid}",
vname="ProfileImagePath",
)["vdata"]
log.debug('user %s with sid=%s profile is located at "%s"', user, sid, profile_dir)
return profile_dir
@ -786,7 +830,7 @@ def list_groups(name):
Return a list of groups the named user belongs to
Args:
name (str): The user name for which to list groups
name (str): The username for which to list groups
Returns:
list: A list of groups to which the user belongs
@ -829,9 +873,9 @@ def getent(refresh=False):
return __context__["user.getent"]
ret = []
for user in __salt__["user.list_users"]():
for user in list_users():
stuff = {}
user_info = __salt__["user.info"](user)
user_info = info(user)
stuff["gid"] = ""
stuff["groups"] = user_info["groups"]
@ -885,12 +929,12 @@ def rename(name, new_name):
Change the username for a named user
Args:
name (str): The user name to change
name (str): The username to change
new_name (str): The new name for the current user
Returns:
bool: True if successful, otherwise False
bool: ``True`` if successful, otherwise ``False``.
CLI Example:

View file

@ -446,7 +446,6 @@ def pytest_collection_modifyitems(config, items):
groups_collection_modifyitems(config, items)
from_filenames_collection_modifyitems(config, items)
log.warning("Modifying collected tests to keep track of fixture usage")
timeout_marker_tests_paths = (
str(PYTESTS_DIR / "pkg"),
str(PYTESTS_DIR / "scenarios"),

View file

@ -318,41 +318,35 @@ def test_listen_requisite_resolution_names(state, state_tree):
assert "test_|-listener_service_|-crond_|-mod_watch" in ret
def test_onlyif_req(state, subtests):
onlyif = [{}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.succeed_with_changes", onlyif=onlyif
)
assert ret.result is True
assert ret.comment == "Success!"
onlyif = [{"fun": "test.true"}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.succeed_without_changes", onlyif=onlyif
)
assert ret.result is True
assert not ret.changes
assert ret.comment == "Success!"
onlyif = [{"fun": "test.false"}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.fail_with_changes", onlyif=onlyif
)
assert ret.result is True
assert not ret.changes
assert ret.comment == "onlyif condition is false"
onlyif = [{"fun": "test.true"}]
with subtests.test(onlyif=onlyif):
ret = state.single(
name="onlyif test", fun="test.fail_with_changes", onlyif=onlyif
)
assert ret.result is False
@pytest.mark.parametrize(
"fun,onlyif,result,comment,assert_changes",
(
("test.succeed_with_changes", [{}], True, "Success!", None),
(
"test.succeed_without_changes",
[{"fun": "test.true"}],
True,
"Success!",
False,
),
(
"test.fail_with_changes",
[{"fun": "test.false"}],
True,
"onlyif condition is false",
False,
),
("test.fail_with_changes", [{"fun": "test.true"}], False, "Failure!", True),
),
)
def test_onlyif_req(state, fun, onlyif, result, comment, assert_changes):
ret = state.single(name="onlyif test", fun=fun, onlyif=onlyif)
assert ret.result is result
assert ret.comment == comment
if assert_changes is True:
assert ret.changes
assert ret.comment == "Failure!"
elif assert_changes is False:
assert not ret.changes
def test_listen_requisite_not_exist(state, state_tree):

View file

@ -0,0 +1,341 @@
import pytest
from saltfactories.utils import random_string
from salt.exceptions import CommandExecutionError
# Module-wide markers: these tests create and delete real local Windows
# accounts, so they are destructive and must only run on Windows.
pytestmark = [
    pytest.mark.destructive_test,
    pytest.mark.skip_unless_on_windows,
    pytest.mark.windows_whitelisted,
]
@pytest.fixture(scope="module")
def user(modules):
    """Handle to the Salt ``user`` execution module (from the ``modules`` fixture)."""
    return modules.user
@pytest.fixture
def username_str(user):
    """Yield a random lowercase username; best-effort delete it on teardown."""
    name = random_string("test-account-", uppercase=False)
    try:
        yield name
    finally:
        try:
            user.delete(name, purge=True, force=True)
        except Exception:  # pylint: disable=broad-except
            # Cleanup only — the test may never have created the account.
            pass
@pytest.fixture
def username_int(user):
    """Yield a random all-digit username; best-effort delete it on teardown."""
    name = random_string("", uppercase=False, lowercase=False, digits=True)
    try:
        yield name
    finally:
        try:
            user.delete(name, purge=True, force=True)
        except Exception:  # pylint: disable=broad-except
            # Cleanup only — the test may never have created the account.
            pass
@pytest.fixture
def account_str(user, username_str):
    """Create a real local account with a string username and add it to 'Users'."""
    with pytest.helpers.create_account(username=username_str) as account:
        user.addgroup(account.username, "Users")
        yield account


@pytest.fixture
def account_int(user, username_int):
    """Create a real local account with an all-digit username and add it to 'Users'."""
    with pytest.helpers.create_account(username=username_int) as account:
        user.addgroup(account.username, "Users")
        yield account
def test_add_str(user, username_str):
    """user.add creates an account from a string username."""
    ret = user.add(name=username_str)
    assert ret is True
    assert username_str in user.list_users()


def test_add_int(user, username_int):
    """user.add also accepts an all-digit username."""
    ret = user.add(name=username_int)
    assert ret is True
    assert username_int in user.list_users()
def test_addgroup_str(user, account_str):
    """user.addgroup adds the account (string name) to the named group."""
    ret = user.addgroup(account_str.username, "Backup Operators")
    assert ret is True
    ret = user.info(account_str.username)
    assert "Backup Operators" in ret["groups"]


def test_addgroup_int(user, account_int):
    """user.addgroup adds the account (all-digit name) to the named group."""
    ret = user.addgroup(account_int.username, "Backup Operators")
    assert ret is True
    ret = user.info(account_int.username)
    assert "Backup Operators" in ret["groups"]
def test_chfullname_str(user, account_str):
    """user.chfullname sets the full name for a string-named account."""
    ret = user.chfullname(account_str.username, "New Full Name")
    assert ret is True
    ret = user.info(account_str.username)
    assert ret["fullname"] == "New Full Name"


def test_chfullname_int(user, account_int):
    """user.chfullname sets the full name for an all-digit-named account."""
    ret = user.chfullname(account_int.username, "New Full Name")
    assert ret is True
    ret = user.info(account_int.username)
    assert ret["fullname"] == "New Full Name"
def test_chgroups_single_str(user, account_str):
    """Default (append) chgroups with one group keeps existing 'Users' membership."""
    groups = ["Backup Operators"]
    ret = user.chgroups(account_str.username, groups=groups)
    assert ret is True
    ret = user.info(account_str.username)
    # Account fixture pre-added 'Users'; append mode must not remove it.
    groups.append("Users")
    assert sorted(ret["groups"]) == sorted(groups)


def test_chgroups_single_int(user, account_int):
    """Same as test_chgroups_single_str, with an all-digit username."""
    groups = ["Backup Operators"]
    ret = user.chgroups(account_int.username, groups=groups)
    assert ret is True
    ret = user.info(account_int.username)
    groups.append("Users")
    assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_list_str(user, account_str):
    """Default (append) chgroups with a list keeps existing 'Users' membership."""
    groups = ["Backup Operators", "Guests"]
    ret = user.chgroups(account_str.username, groups=groups)
    assert ret is True
    ret = user.info(account_str.username)
    # Account fixture pre-added 'Users'; append mode must not remove it.
    groups.append("Users")
    assert sorted(ret["groups"]) == sorted(groups)


def test_chgroups_list_int(user, account_int):
    """Same as test_chgroups_list_str, with an all-digit username."""
    groups = ["Backup Operators", "Guests"]
    ret = user.chgroups(account_int.username, groups=groups)
    assert ret is True
    ret = user.info(account_int.username)
    groups.append("Users")
    assert sorted(ret["groups"]) == sorted(groups)
def test_chgroups_list_append_false_str(user, account_str):
    """chgroups(append=False) makes membership exactly the given list ('Users' dropped)."""
    groups = ["Backup Operators", "Guests"]
    ret = user.chgroups(account_str.username, groups=groups, append=False)
    assert ret is True
    ret = user.info(account_str.username)
    assert sorted(ret["groups"]) == sorted(groups)


def test_chgroups_list_append_false_int(user, account_int):
    """Same as the str variant, with an all-digit username."""
    groups = ["Backup Operators", "Guests"]
    ret = user.chgroups(account_int.username, groups=groups, append=False)
    assert ret is True
    ret = user.info(account_int.username)
    assert sorted(ret["groups"]) == sorted(groups)
def test_chhome_str(user, account_str):
    """user.chhome updates the account's home directory."""
    home = r"C:\spongebob\squarepants"
    ret = user.chhome(name=account_str.username, home=home)
    assert ret is True
    ret = user.info(name=account_str.username)
    assert ret["home"] == home


def test_chhome_int(user, account_int):
    """Same as test_chhome_str, with an all-digit username."""
    home = r"C:\spongebob\squarepants"
    ret = user.chhome(name=account_int.username, home=home)
    assert ret is True
    ret = user.info(name=account_int.username)
    assert ret["home"] == home
def test_chprofile_str(user, account_str):
    """user.chprofile updates the account's profile directory."""
    profile = r"C:\spongebob\squarepants"
    ret = user.chprofile(name=account_str.username, profile=profile)
    assert ret is True
    ret = user.info(name=account_str.username)
    assert ret["profile"] == profile


def test_chprofile_int(user, account_int):
    """Same as test_chprofile_str, with an all-digit username."""
    profile = r"C:\spongebob\squarepants"
    ret = user.chprofile(name=account_int.username, profile=profile)
    assert ret is True
    ret = user.info(name=account_int.username)
    assert ret["profile"] == profile
def test_delete_str(user, account_str):
    """delete removes a string-named account; info then returns an empty dict."""
    ret = user.delete(name=account_str.username)
    assert ret is True
    assert user.info(name=account_str.username) == {}
def test_delete_int(user, account_int):
    """delete removes a numeric-named account; info then returns an empty dict."""
    ret = user.delete(name=account_int.username)
    assert ret is True
    assert user.info(name=account_int.username) == {}
def test_get_user_sid_str(user, account_str):
    """get_user_sid returns a SID string ("S-1-5...") for a string-named account."""
    ret = user.get_user_sid(account_str.username)
    assert ret.startswith("S-1-5")
def test_get_user_sid_int(user, account_int):
    """get_user_sid returns a SID string ("S-1-5...") for a numeric-named account."""
    ret = user.get_user_sid(account_int.username)
    assert ret.startswith("S-1-5")
def test_info_str(user, account_str):
    """info reports the account name and a SID-style uid for a string-named account."""
    ret = user.info(account_str.username)
    assert ret["name"] == account_str.username
    assert ret["uid"].startswith("S-1-5")
def test_info_int(user, account_int):
    """info reports the account name and a SID-style uid for a numeric-named account."""
    ret = user.info(account_int.username)
    assert ret["name"] == account_int.username
    assert ret["uid"].startswith("S-1-5")
def test_list_groups_str(user, account_str):
    """A freshly created string-named account belongs only to "Users"."""
    ret = user.list_groups(account_str.username)
    assert ret == ["Users"]
def test_list_groups_int(user, account_int):
    """A freshly created numeric-named account belongs only to "Users"."""
    ret = user.list_groups(account_int.username)
    assert ret == ["Users"]
def test_list_users(user):
    """list_users includes the built-in Administrator account."""
    ret = user.list_users()
    assert "Administrator" in ret
def test_removegroup_str(user, account_str):
    """removegroup drops "Users", leaving a string-named account with no groups."""
    ret = user.removegroup(account_str.username, "Users")
    assert ret is True
    ret = user.info(account_str.username)
    assert ret["groups"] == []
def test_removegroup_int(user, account_int):
    """removegroup drops "Users", leaving a numeric-named account with no groups."""
    ret = user.removegroup(account_int.username, "Users")
    assert ret is True
    ret = user.info(account_int.username)
    assert ret["groups"] == []
def test_rename_str(user, account_str):
    """rename moves a string-named account to a new random name and back."""
    new_name = random_string("test-account-", uppercase=False)
    ret = user.rename(name=account_str.username, new_name=new_name)
    assert ret is True
    assert new_name in user.list_users()
    # Let's set it back so that it gets cleaned up...
    ret = user.rename(name=new_name, new_name=account_str.username)
    assert ret is True
def test_rename_str_missing(user, account_str):
    """Renaming a nonexistent account raises CommandExecutionError."""
    missing = random_string("test-account-", uppercase=False)
    with pytest.raises(CommandExecutionError):
        user.rename(name=missing, new_name="spongebob")
def test_rename_str_existing(user, account_str):
    """Renaming onto an already-existing account name raises CommandExecutionError."""
    new_existing = random_string("test-account-", uppercase=False)
    ret = user.add(name=new_existing)
    assert ret is True
    with pytest.raises(CommandExecutionError):
        user.rename(name=account_str.username, new_name=new_existing)
    # We need to clean this up because it wasn't created in a fixture
    ret = user.delete(name=new_existing, purge=True, force=True)
    assert ret is True
    assert new_existing not in user.list_users()
def test_rename_int(user, account_int):
    """rename moves a numeric-named account to a new all-digit name and back."""
    # digits-only random name to mirror the numeric-username scenario
    new_name = random_string("", uppercase=False, lowercase=False, digits=True)
    ret = user.rename(name=account_int.username, new_name=new_name)
    assert ret is True
    assert new_name in user.list_users()
    # Let's set it back so that it gets cleaned up...
    ret = user.rename(name=new_name, new_name=account_int.username)
    assert ret is True
def test_rename_int_missing(user, account_int):
    """Renaming a nonexistent all-digit account raises CommandExecutionError."""
    missing = random_string("", uppercase=False, lowercase=False, digits=True)
    with pytest.raises(CommandExecutionError):
        user.rename(name=missing, new_name="spongebob")
def test_rename_int_existing(user, account_int):
    """Renaming onto an existing all-digit account name raises CommandExecutionError."""
    new_existing = random_string("", uppercase=False, lowercase=False, digits=True)
    ret = user.add(name=new_existing)
    assert ret is True
    with pytest.raises(CommandExecutionError):
        user.rename(name=account_int.username, new_name=new_existing)
    # We need to clean this up because it wasn't created in a fixture
    ret = user.delete(name=new_existing, purge=True, force=True)
    assert ret is True
    assert new_existing not in user.list_users()
def test_setpassword_str(user, account_str):
    """setpassword succeeds on a string-named account."""
    ret = user.setpassword(account_str.username, password="Sup3rS3cret")
    # We have no way of verifying the password was changed on Windows, so the
    # best we can do is check that the command completed successfully
    assert ret is True
def test_setpassword_int(user, account_int):
    """setpassword succeeds on a numeric-named account."""
    ret = user.setpassword(account_int.username, password="Sup3rS3cret")
    # We have no way of verifying the password was changed on Windows, so the
    # best we can do is check that the command completed successfully
    assert ret is True
# Each case: the user.update() kwarg to set, the value to set it to, the
# user.info() field to read back (empty string means "same as the kwarg
# name"), and the expected readback value (None means "same as new_value").
@pytest.mark.parametrize(
    "value_name, new_value, info_field, expected",
    [
        ("description", "New description", "", None),
        ("homedrive", "H:", "", None),
        ("logonscript", "\\\\server\\script.cmd", "", None),
        ("expiration_date", "3/19/2024", "", "2024-03-19 00:00:00"),
        ("expiration_date", "Never", "", None),
        ("expired", True, "", None),
        ("expired", False, "", None),
        ("account_disabled", True, "", None),
        ("account_disabled", False, "", None),
        ("unlock_account", True, "account_locked", False),
        ("password_never_expires", True, "", None),
        ("password_never_expires", False, "", None),
        ("disallow_change_password", True, "", None),
        ("disallow_change_password", False, "", None),
    ],
)
def test_update_str(user, value_name, new_value, info_field, expected, account_str):
    """user.update sets the given attribute and info reflects the change."""
    setting = {value_name: new_value}
    ret = user.update(account_str.username, **setting)
    assert ret is True
    ret = user.info(account_str.username)
    # Resolve the parametrized defaults (see table comment above).
    info_field = info_field if info_field else value_name
    expected = expected if expected is not None else new_value
    assert ret[info_field] == expected

View file

@ -6,6 +6,7 @@ user present with custom homedir
"""
import pathlib
import random
import shutil
import sys
@ -43,6 +44,11 @@ def username(sminion):
pass
@pytest.fixture
def guid():
    # Random gid/uid in a high range to avoid colliding with system accounts.
    return random.randint(60000, 61000)
@pytest.fixture
def user_home(username, tmp_path):
if salt.utils.platform.is_windows():
@ -429,73 +435,78 @@ def test_user_present_change_optional_groups(
assert user_info["groups"] == [group_1.name]
@pytest.fixture
def user_present_groups(states):
    """Yield two test group names and ensure they are removed afterwards."""
    groups = ["testgroup1", "testgroup2"]
    try:
        yield groups
    finally:
        # Cleanup runs even if the test failed part-way through.
        for group in groups:
            ret = states.group.absent(name=group)
            assert ret.result is True
@pytest.mark.skip_unless_on_linux(reason="underlying functionality only runs on Linux")
def test_user_present_no_groups(modules, states, username):
def test_user_present_no_groups(modules, states, username, user_present_groups, guid):
"""
test user.present when groups arg is not
included by the group is created in another
state. Re-run the states to ensure there are
not changes and it is idempotent.
"""
groups = ["testgroup1", "testgroup2"]
try:
ret = states.group.present(name=username, gid=61121)
assert ret.result is True
ret = states.group.present(name=username, gid=guid)
assert ret.result is True
ret = states.user.present(
name=username,
uid=61121,
gid=61121,
)
assert ret.result is True
assert ret.changes["groups"] == [username]
assert ret.changes["name"] == username
ret = states.user.present(
name=username,
uid=guid,
gid=guid,
)
assert ret.result is True
assert ret.changes["groups"] == [username]
assert ret.changes["name"] == username
ret = states.group.present(
name=groups[0],
members=[username],
)
assert ret.changes["members"] == [username]
ret = states.group.present(
name=user_present_groups[0],
members=[username],
)
assert ret.changes["members"] == [username]
ret = states.group.present(
name=groups[1],
members=[username],
)
assert ret.changes["members"] == [username]
ret = states.group.present(
name=user_present_groups[1],
members=[username],
)
assert ret.changes["members"] == [username]
user_info = modules.user.info(username)
assert user_info
assert user_info["groups"] == [username, groups[0], groups[1]]
user_info = modules.user.info(username)
assert user_info
assert user_info["groups"] == [username, *user_present_groups]
# run again, expecting no changes
ret = states.group.present(name=username)
assert ret.result is True
assert ret.changes == {}
# run again, expecting no changes
ret = states.group.present(name=username)
assert ret.result is True
assert ret.changes == {}
ret = states.user.present(
name=username,
)
assert ret.result is True
assert ret.changes == {}
ret = states.user.present(
name=username,
)
assert ret.result is True
assert ret.changes == {}
ret = states.group.present(
name=groups[0],
members=[username],
)
assert ret.result is True
assert ret.changes == {}
ret = states.group.present(
name=user_present_groups[0],
members=[username],
)
assert ret.result is True
assert ret.changes == {}
ret = states.group.present(
name=groups[1],
members=[username],
)
assert ret.result is True
assert ret.changes == {}
ret = states.group.present(
name=user_present_groups[1],
members=[username],
)
assert ret.result is True
assert ret.changes == {}
user_info = modules.user.info(username)
assert user_info
assert user_info["groups"] == [username, groups[0], groups[1]]
finally:
for group in groups:
ret = states.group.absent(name=group)
assert ret.result is True
user_info = modules.user.info(username)
assert user_info
assert user_info["groups"] == [username, *user_present_groups]

View file

@ -36,17 +36,25 @@ def test_salt_downgrade(salt_call_cli, install_salt):
assert "Authentication information could" in use_lib.stderr
# Verify there is a running minion by getting its PID
salt_name = "salt"
if platform.is_windows():
process_name = "salt-minion.exe"
else:
process_name = "salt-minion"
old_pid = None
old_pid = []
# psutil process name only returning first part of the command '/opt/saltstack/'
# need to check all of command line for salt-minion
# ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager']
# and psutil is only returning the salt-minion once
for proc in psutil.process_iter():
if process_name in proc.name():
if psutil.Process(proc.ppid()).name() != process_name:
old_pid = proc.pid
break
assert old_pid is not None
if salt_name in proc.name():
cmdl_strg = " ".join(str(element) for element in proc.cmdline())
if process_name in cmdl_strg:
old_pid.append(proc.pid)
assert old_pid
# Downgrade Salt to the previous version and test
install_salt.install(downgrade=True)
@ -61,13 +69,14 @@ def test_salt_downgrade(salt_call_cli, install_salt):
# Verify there is a new running minion by getting its PID and comparing it
# with the PID from before the upgrade
new_pid = None
new_pid = []
for proc in psutil.process_iter():
if process_name in proc.name():
if psutil.Process(proc.ppid()).name() != process_name:
new_pid = proc.pid
break
assert new_pid is not None
if salt_name in proc.name():
cmdl_strg = " ".join(str(element) for element in proc.cmdline())
if process_name in cmdl_strg:
new_pid.append(proc.pid)
assert new_pid
assert new_pid != old_pid
ret = install_salt.proc.run(bin_file, "--version")

View file

@ -32,17 +32,25 @@ def test_salt_upgrade(salt_call_cli, install_salt):
assert "Authentication information could" in use_lib.stderr
# Verify there is a running minion by getting its PID
salt_name = "salt"
if platform.is_windows():
process_name = "salt-minion.exe"
else:
process_name = "salt-minion"
old_pid = None
old_pid = []
# psutil process name only returning first part of the command '/opt/saltstack/'
# need to check all of command line for salt-minion
# ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager']
# and psutil is only returning the salt-minion once
for proc in psutil.process_iter():
if process_name in proc.name():
if psutil.Process(proc.ppid()).name() != process_name:
old_pid = proc.pid
break
assert old_pid is not None
if salt_name in proc.name():
cmdl_strg = " ".join(str(element) for element in proc.cmdline())
if process_name in cmdl_strg:
old_pid.append(proc.pid)
assert old_pid
# Upgrade Salt from previous version and test
install_salt.install(upgrade=True)
@ -54,13 +62,14 @@ def test_salt_upgrade(salt_call_cli, install_salt):
# Verify there is a new running minion by getting its PID and comparing it
# with the PID from before the upgrade
new_pid = None
new_pid = []
for proc in psutil.process_iter():
if process_name in proc.name():
if psutil.Process(proc.ppid()).name() != process_name:
new_pid = proc.pid
break
assert new_pid is not None
if salt_name in proc.name():
cmdl_strg = " ".join(str(element) for element in proc.cmdline())
if process_name in cmdl_strg:
new_pid.append(proc.pid)
assert new_pid
assert new_pid != old_pid
if install_salt.relenv:

View file

@ -5,6 +5,7 @@
import pytest
import salt.modules.pacmanpkg as pacman
import salt.utils.systemd
from tests.support.mock import MagicMock, patch
@ -166,3 +167,31 @@ def test_group_diff():
):
results = pacman.group_diff("testgroup")
assert results["default"] == {"installed": ["A"], "not installed": ["C"]}
def test_pacman_install_sysupgrade_flag():
    """
    Test if the pacman.install function appends the '-u' flag only when sysupgrade is True
    """
    # Mock out the __salt__ dunder and module helpers so pacman.install runs
    # without touching a real system; we only inspect the command it builds.
    mock_parse_targets = MagicMock(return_value=({"somepkg": None}, "repository"))
    mock_has_scope = MagicMock(return_value=False)
    mock_list_pkgs = MagicMock(return_value={"somepkg": "1.0"})
    mock_run_all = MagicMock(return_value={"retcode": 0, "stderr": ""})
    with patch.dict(
        pacman.__salt__,
        {
            "cmd.run_all": mock_run_all,
            "pkg_resource.parse_targets": mock_parse_targets,
            "config.get": MagicMock(return_value=True),
        },
    ), patch.object(salt.utils.systemd, "has_scope", mock_has_scope), patch.object(
        pacman, "list_pkgs", mock_list_pkgs
    ):
        # sysupgrade=True: the generated command line must include '-u'
        pacman.install(name="somepkg", sysupgrade=True)
        args, _ = pacman.__salt__["cmd.run_all"].call_args
        assert "-u" in args[0]
        # sysupgrade unset (with refresh=True): '-u' must not be added
        pacman.install(name="somepkg", sysupgrade=None, refresh=True)
        args, _ = pacman.__salt__["cmd.run_all"].call_args
        assert "-u" not in args[0]

View file

@ -66,7 +66,7 @@ def temp_state_file(name, contents, saltenv="base", strip_first_newline=True):
saltenv(str):
The salt env to use. Either ``base`` or ``prod``
strip_first_newline(bool):
Wether to strip the initial first new line char or not.
Whether to strip the initial first new line char or not.
"""
if saltenv == "base":
@ -112,7 +112,7 @@ def temp_pillar_file(name, contents, saltenv="base", strip_first_newline=True):
saltenv(str):
The salt env to use. Either ``base`` or ``prod``
strip_first_newline(bool):
Wether to strip the initial first new line char or not.
Whether to strip the initial first new line char or not.
"""
if saltenv == "base":
@ -239,7 +239,7 @@ class TestGroup:
self._delete_group = True
log.debug("Created system group: %s", self)
else:
log.debug("Reusing exising system group: %s", self)
log.debug("Reusing existing system group: %s", self)
if self.members:
ret = self.sminion.functions.group.members(
self.name, members_list=self.members
@ -328,14 +328,14 @@ class TestAccount:
if not self.sminion.functions.user.info(self.username):
log.debug("Creating system account: %s", self)
ret = self.sminion.functions.user.add(self.username)
assert ret
assert ret is True
self._delete_account = True
if salt.utils.platform.is_darwin() or salt.utils.platform.is_windows():
password = self.password
else:
password = self.hashed_password
ret = self.sminion.functions.shadow.set_password(self.username, password)
assert ret
assert ret is True
assert self.username in self.sminion.functions.user.list_users()
if self._group:
self.group.__enter__()
@ -347,7 +347,7 @@ class TestAccount:
if self._delete_account:
log.debug("Created system account: %s", self)
else:
log.debug("Reusing exisintg system account: %s", self)
log.debug("Reusing existing system account: %s", self)
# Run tests
return self
@ -703,7 +703,7 @@ class EntropyGenerator:
kernel_entropy_file = pathlib.Path("/proc/sys/kernel/random/entropy_avail")
kernel_poolsize_file = pathlib.Path("/proc/sys/kernel/random/poolsize")
if not kernel_entropy_file.exists():
log.info("The '%s' file is not avilable", kernel_entropy_file)
log.info("The '%s' file is not available", kernel_entropy_file)
return
self.current_entropy = int(
@ -712,7 +712,7 @@ class EntropyGenerator:
log.info("Available Entropy: %s", self.current_entropy)
if not kernel_poolsize_file.exists():
log.info("The '%s' file is not avilable", kernel_poolsize_file)
log.info("The '%s' file is not available", kernel_poolsize_file)
else:
self.current_poolsize = int(
kernel_poolsize_file.read_text(encoding="utf-8").strip()

View file

@ -32,6 +32,7 @@ ptscripts.set_default_config(DEFAULT_REQS_CONFIG)
ptscripts.register_tools_module("tools.changelog")
ptscripts.register_tools_module("tools.ci")
ptscripts.register_tools_module("tools.docs")
ptscripts.register_tools_module("tools.gh")
ptscripts.register_tools_module("tools.pkg")
ptscripts.register_tools_module("tools.pkg.repo")
ptscripts.register_tools_module("tools.pkg.build")

View file

@ -203,7 +203,7 @@ def runner_types(ctx: Context, event_name: str):
# If this is a pull request coming from the same repository, don't run anything
ctx.info("Pull request is coming from the same repository.")
ctx.info("Not running any jobs since they will run against the branch")
ctx.info("Writing 'runners' to the github outputs file")
ctx.info("Writing 'runners' to the github outputs file:\n", runners)
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"runners={json.dumps(runners)}\n")
ctx.exit(0)
@ -211,7 +211,7 @@ def runner_types(ctx: Context, event_name: str):
# This is a PR from a forked repository
ctx.info("Pull request is not comming from the same repository")
runners["github-hosted"] = runners["self-hosted"] = True
ctx.info("Writing 'runners' to the github outputs file")
ctx.info("Writing 'runners' to the github outputs file:\n", runners)
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"runners={json.dumps(runners)}\n")
ctx.exit(0)
@ -225,7 +225,7 @@ def runner_types(ctx: Context, event_name: str):
# This is running on a forked repository, don't run tests
ctx.info("The push event is on a forked repository")
runners["github-hosted"] = True
ctx.info("Writing 'runners' to the github outputs file")
ctx.info("Writing 'runners' to the github outputs file:\n", runners)
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"runners={json.dumps(runners)}\n")
ctx.exit(0)
@ -233,7 +233,7 @@ def runner_types(ctx: Context, event_name: str):
# Not running on a fork, or the fork has self hosted runners, run everything
ctx.info(f"The {event_name!r} event is from the main repository")
runners["github-hosted"] = runners["self-hosted"] = True
ctx.info("Writing 'runners' to the github outputs file")
ctx.info("Writing 'runners' to the github outputs file:\n", runners)
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"runners={json.dumps(runners)}")
ctx.exit(0)
@ -312,6 +312,11 @@ def define_jobs(
if event_name != "pull_request":
# In this case, all defined jobs should run
with open(github_step_summary, "a", encoding="utf-8") as wfh:
wfh.write("Selected Jobs:\n")
for name, value in sorted(jobs.items()):
wfh.write(f" - `{name}`: {value}\n")
ctx.info("Writing 'jobs' to the github outputs file")
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"jobs={json.dumps(jobs)}\n")
@ -423,7 +428,7 @@ def define_jobs(
with open(github_step_summary, "a", encoding="utf-8") as wfh:
wfh.write("Selected Jobs:\n")
for name, value in sorted(jobs.items()):
wfh.write(f" - {name}: {value}\n")
wfh.write(f" - `{name}`: {value}\n")
ctx.info("Writing 'jobs' to the github outputs file")
with open(github_output, "a", encoding="utf-8") as wfh:
@ -622,7 +627,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
wfh.write(f"{path}\n")
wfh.write("</pre>\n</details>\n")
ctx.info("Writing 'testrun' to the github outputs file")
ctx.info("Writing 'testrun' to the github outputs file:\n", testrun)
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"testrun={json.dumps(testrun)}\n")
@ -655,7 +660,7 @@ def matrix(
"""
_matrix = []
_splits = {
"functional": 3,
"functional": 4,
"integration": 6,
"scenarios": 1,
"unit": 4,
@ -1007,13 +1012,39 @@ def get_pr_test_labels(
pr = gh_event["pull_request"]["number"]
labels = _get_pr_test_labels_from_event_payload(gh_event)
shared_context = tools.utils.get_cicd_shared_context()
mandatory_os_slugs = set(shared_context["mandatory_os_slugs"])
available = set(tools.utils.get_golden_images())
# Add MacOS provided by GitHub
available.update({"macos-12", "macos-13", "macos-13-arm64"})
# Remove mandatory OS'ss
available.difference_update(mandatory_os_slugs)
select_all = set(available)
selected = set()
test_labels = []
if labels:
ctx.info(f"Test labels for pull-request #{pr} on {repository}:")
for name, description in labels:
for name, description in sorted(labels):
ctx.info(f" * [yellow]{name}[/yellow]: {description}")
if name.startswith("test:os:"):
slug = name.split("test:os:", 1)[-1]
if slug not in available and name != "test:os:all":
ctx.warn(
f"The '{slug}' slug exists as a label but not as an available OS."
)
selected.add(slug)
if slug != "all":
available.remove(slug)
continue
test_labels.append(name)
else:
ctx.info(f"No test labels for pull-request #{pr} on {repository}")
if "all" in selected:
selected = select_all
available.clear()
github_output = os.environ.get("GITHUB_OUTPUT")
if github_output is None:
ctx.exit(0)
@ -1021,9 +1052,44 @@ def get_pr_test_labels(
if TYPE_CHECKING:
assert github_output is not None
ctx.info("Writing 'labels' to the github outputs file")
ctx.info("Writing 'labels' to the github outputs file...")
ctx.info("Test Labels:")
if not test_labels:
ctx.info(" * None")
else:
for label in sorted(test_labels):
ctx.info(f" * [yellow]{label}[/yellow]")
ctx.info("* OS Labels:")
if not selected:
ctx.info(" * None")
else:
for slug in sorted(selected):
ctx.info(f" * [yellow]{slug}[/yellow]")
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"labels={json.dumps([label[0] for label in labels])}\n")
wfh.write(f"os-labels={json.dumps([label for label in selected])}\n")
wfh.write(f"test-labels={json.dumps([label for label in test_labels])}\n")
github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY")
if github_step_summary is not None:
with open(github_step_summary, "a", encoding="utf-8") as wfh:
wfh.write("Mandatory OS Test Runs:\n")
for slug in sorted(mandatory_os_slugs):
wfh.write(f"* `{slug}`\n")
wfh.write("\nOptional OS Test Runs(selected by label):\n")
if not selected:
wfh.write("* None\n")
else:
for slug in sorted(selected):
wfh.write(f"* `{slug}`\n")
wfh.write("\nSkipped OS Tests Runs(NOT selected by label):\n")
if not available:
wfh.write("* None\n")
else:
for slug in sorted(available):
wfh.write(f"* `{slug}`\n")
ctx.exit(0)

190
tools/gh.py Normal file
View file

@ -0,0 +1,190 @@
"""
These commands are used to interact and make changes to GitHub.
"""
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import logging
from ptscripts import Context, command_group
import tools.utils
import tools.utils.gh
log = logging.getLogger(__name__)
WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows"
TEMPLATES = WORKFLOWS / "templates"
# Define the command group
cgroup = command_group(
name="gh",
help="GitHub Related Commands",
description=__doc__,
)
@cgroup.command(
    name="sync-os-labels",
    arguments={
        "repository": {
            "help": "Github repository.",
        },
    },
)
def sync_os_labels(
    ctx: Context, repository: str = "saltstack/salt", color: str = "C2E0C6"
):
    """
    Synchronize the GitHub labels to the OS known to be tested.

    Builds the full set of expected ``test:os:<slug>`` labels (GitHub-hosted
    macOS runners plus the golden AMI images), then reconciles the
    repository's labels against it via the GitHub REST API: labels for
    mandatory OS slugs and unknown slugs are deleted, out-of-date labels are
    patched, and missing labels are created.
    """
    description_prefix = "Run Tests Against"
    # Labels that are not derived from the golden images: the catch-all
    # "all" label and the GitHub-provided macOS runners.
    known_os = {
        "test:os:all": {
            "name": "test:os:all",
            "color": color,
            "description": f"{description_prefix} ALL OS'es",
        },
        "test:os:macos-12": {
            "name": "test:os:macos-12",
            "color": color,
            "description": f"{description_prefix} MacOS 12",
        },
        "test:os:macos-13": {
            "name": "test:os:macos-13",
            "color": color,
            "description": f"{description_prefix} MacOS 13",
        },
        "test:os:macos-13-arm64": {
            "name": "test:os:macos-13-arm64",
            "color": color,
            "description": f"{description_prefix} MacOS 13 Arm64",
        },
    }
    # One label per golden image; label description is derived from the AMI
    # description with minor cosmetic cleanup.
    for slug, details in tools.utils.get_golden_images().items():
        name = f"test:os:{slug}"
        ami_description = (
            details["ami_description"]
            .replace("CI Image of ", "")
            .replace("arm64", "Arm64")
        )
        known_os[name] = {
            "name": name,
            "color": color,
            "description": f"{description_prefix} {ami_description}",
        }
    ctx.info(known_os)

    github_token = tools.utils.gh.get_github_token(ctx)
    if github_token is None:
        ctx.error("Querying labels requires being authenticated to GitHub.")
        ctx.info(
            "Either set 'GITHUB_TOKEN' to a valid token, or configure the 'gh' tool such that "
            "'gh auth token' returns a token."
        )
        ctx.exit(1)

    existing_labels = set()
    labels_to_update = []
    labels_to_delete = set()
    shared_context = tools.utils.get_cicd_shared_context()
    # Mandatory OS slugs always run in CI, so their opt-in labels are
    # queued for deletion up front.
    for slug in shared_context["mandatory_os_slugs"]:
        label = f"test:os:{slug}"
        labels_to_delete.add(label)

    headers = {
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {github_token}",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    with ctx.web as web:
        web.headers.update(headers)
        # Page through all repository labels (100 per page) until an empty
        # page is returned.
        page = 0
        while True:
            page += 1
            params = {
                "per_page": 100,
                "page": page,
            }
            ret = web.get(
                f"https://api.github.com/repos/{repository}/labels",
                params=params,
            )
            if ret.status_code != 200:
                ctx.error(
                    f"Failed to get the labels for repository {repository!r}: {ret.reason}"
                )
                ctx.exit(1)
            data = ret.json()
            if not data:
                break
            for details in data:
                label = details["name"]
                # Only test:os:* labels are managed by this command.
                if not label.startswith("test:os:"):
                    continue
                existing_labels.add(label)
                if label not in known_os:
                    # Exists on GitHub but is no longer a known OS: delete.
                    labels_to_delete.add(details["name"])
                    continue
                # NOTE(review): always true after the `continue` above;
                # the `if` guard appears redundant — confirm intent.
                if label in known_os:
                    # Known label already exists: pop it so it isn't
                    # re-created later, and patch it if any field differs.
                    update_details = known_os.pop(label)
                    if label in labels_to_delete:
                        continue
                    for key, value in update_details.items():
                        if details[key] != value:
                            labels_to_update.append(update_details)
                            break
                    continue
        # Make sure mandatory slugs are neither created nor updated.
        for slug in shared_context["mandatory_os_slugs"]:
            label = f"test:os:{slug}"
            if label in known_os:
                labels_to_delete.add(label)
                known_os.pop(label)
            # NOTE(review): labels_to_update holds dicts, so this membership
            # test compares a str against dicts and looks always-False —
            # verify whether label names were intended here.
            if label in labels_to_update:
                labels_to_delete.add(label)
                known_os.pop(label)
        # Delete labels that exist on GitHub but should not.
        for label in labels_to_delete:
            if label not in existing_labels:
                continue
            ctx.info(f"Deleting label '{label}' ...")
            ret = web.delete(
                f"https://api.github.com/repos/{repository}/labels/{label}",
            )
            if ret.status_code != 204:
                ctx.error(
                    f"Failed to delete label '{label}' for repository {repository!r}: {ret.reason}"
                )
        # Patch labels whose color/description drifted.
        ctx.info("Updating OS Labels in GitHub...")
        for details in labels_to_update:
            label = details["name"]
            ctx.info(f"Updating label '{label}' ...")
            ret = web.patch(
                f"https://api.github.com/repos/{repository}/labels/{label}",
                params=details,
            )
            if ret.status_code != 200:
                ctx.error(
                    f"Failed to update label '{details['name']}' for repository {repository!r}: {ret.reason}"
                )
        # Whatever is left in known_os was never seen on GitHub: create it.
        for label, details in known_os.items():
            details["name"] = label
            ctx.info(f"Creating label: {details} ...")
            ret = web.post(
                f"https://api.github.com/repos/{repository}/labels",
                json=details,
            )
            if ret.status_code != 201:
                ctx.error(
                    f"Failed to create label '{details['name']}' for repository {repository!r}: {ret.reason}"
                )
                print(ret.content)

View file

@ -14,7 +14,6 @@ import tarfile
import zipfile
from typing import TYPE_CHECKING
import yaml
from ptscripts import Context, command_group
import tools.utils
@ -30,10 +29,6 @@ build = command_group(
)
def _get_shared_constants():
return yaml.safe_load(tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text())
@build.command(
name="deb",
arguments={
@ -77,7 +72,7 @@ def debian(
)
ctx.exit(1)
ctx.info("Building the package from the source files")
shared_constants = _get_shared_constants()
shared_constants = tools.utils.get_cicd_shared_context()
if not python_version:
python_version = shared_constants["python_version"]
if not relenv_version:
@ -149,7 +144,7 @@ def rpm(
)
ctx.exit(1)
ctx.info("Building the package from the source files")
shared_constants = _get_shared_constants()
shared_constants = tools.utils.get_cicd_shared_context()
if not python_version:
python_version = shared_constants["python_version"]
if not relenv_version:
@ -234,7 +229,7 @@ def macos(
if not onedir:
# Prep the salt onedir if not building from an existing one
shared_constants = _get_shared_constants()
shared_constants = tools.utils.get_cicd_shared_context()
if not python_version:
python_version = shared_constants["python_version"]
if not relenv_version:
@ -323,7 +318,7 @@ def windows(
assert salt_version is not None
assert arch is not None
shared_constants = _get_shared_constants()
shared_constants = tools.utils.get_cicd_shared_context()
if not python_version:
python_version = shared_constants["python_version"]
if not relenv_version:
@ -490,7 +485,7 @@ def onedir_dependencies(
if platform != "macos" and arch == "arm64":
arch = "aarch64"
shared_constants = _get_shared_constants()
shared_constants = tools.utils.get_cicd_shared_context()
if not python_version:
python_version = shared_constants["python_version"]
if not relenv_version:
@ -629,7 +624,7 @@ def salt_onedir(
if platform == "darwin":
platform = "macos"
shared_constants = _get_shared_constants()
shared_constants = tools.utils.get_cicd_shared_context()
if not relenv_version:
relenv_version = shared_constants["relenv_version"]
if TYPE_CHECKING:

View file

@ -5,25 +5,32 @@ These commands are used for our GitHub Actions workflows.
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import json
import logging
import shutil
import sys
from typing import TYPE_CHECKING, cast
import yaml
from jinja2 import Environment, FileSystemLoader
from jinja2 import Environment, FileSystemLoader, StrictUndefined
from ptscripts import Context, command_group
import tools.utils
from tools.utils import Linux, MacOS, Windows
if sys.version_info < (3, 11):
from typing_extensions import TypedDict
else:
from typing import TypedDict # pylint: disable=no-name-in-module
log = logging.getLogger(__name__)
WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows"
TEMPLATES = WORKFLOWS / "templates"
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open(
"r", encoding="utf-8"
) as rfh:
AMIS = json.load(rfh)
class PlatformDefinitions(TypedDict):
linux: list[Linux]
macos: list[MacOS]
windows: list[Windows]
# Define the command group
@ -97,94 +104,299 @@ def generate_workflows(ctx: Context):
"template": "build-deps-ci-action.yml",
},
}
test_salt_listing: dict[str, list[tuple[str, ...]]] = {
"linux": [
("almalinux-8", "Alma Linux 8", "x86_64", "no-fips"),
("almalinux-8-arm64", "Alma Linux 8 Arm64", "arm64", "no-fips"),
("almalinux-9", "Alma Linux 9", "x86_64", "no-fips"),
("almalinux-9-arm64", "Alma Linux 9 Arm64", "arm64", "no-fips"),
("amazonlinux-2", "Amazon Linux 2", "x86_64", "no-fips"),
("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "arm64", "no-fips"),
("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "no-fips"),
("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "arm64", "no-fips"),
("archlinux-lts", "Arch Linux LTS", "x86_64", "no-fips"),
("centos-7", "CentOS 7", "x86_64", "no-fips"),
("debian-10", "Debian 10", "x86_64", "no-fips"),
("debian-11", "Debian 11", "x86_64", "no-fips"),
("debian-11-arm64", "Debian 11 Arm64", "arm64", "no-fips"),
("debian-12", "Debian 12", "x86_64", "no-fips"),
("debian-12-arm64", "Debian 12 Arm64", "arm64", "no-fips"),
("fedora-39", "Fedora 39", "x86_64", "no-fips"),
("opensuse-15", "Opensuse 15", "x86_64", "no-fips"),
("photonos-4", "Photon OS 4", "x86_64", "fips"),
("photonos-4-arm64", "Photon OS 4 Arm64", "arm64", "fips"),
("photonos-5", "Photon OS 5", "x86_64", "fips"),
("photonos-5-arm64", "Photon OS 5 Arm64", "arm64", "fips"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "no-fips"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "arm64", "no-fips"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "no-fips"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "arm64", "no-fips"),
],
"macos": [
("macos-12", "macOS 12", "x86_64"),
("macos-13", "macOS 13", "x86_64"),
("macos-13-xlarge", "macOS 13 Arm64", "arm64"),
],
"windows": [
("windows-2016", "Windows 2016", "amd64"),
("windows-2019", "Windows 2019", "amd64"),
("windows-2022", "Windows 2022", "amd64"),
],
}
test_salt_listing = PlatformDefinitions(
{
"linux": [
Linux(slug="almalinux-8", display_name="Alma Linux 8", arch="x86_64"),
Linux(
slug="almalinux-8-arm64",
display_name="Alma Linux 8 Arm64",
arch="arm64",
),
Linux(slug="almalinux-9", display_name="Alma Linux 9", arch="x86_64"),
Linux(
slug="almalinux-9-arm64",
display_name="Alma Linux 9 Arm64",
arch="arm64",
),
Linux(
slug="amazonlinux-2", display_name="Amazon Linux 2", arch="x86_64"
),
Linux(
slug="amazonlinux-2-arm64",
display_name="Amazon Linux 2 Arm64",
arch="arm64",
),
Linux(
slug="amazonlinux-2023",
display_name="Amazon Linux 2023",
arch="x86_64",
),
Linux(
slug="amazonlinux-2023-arm64",
display_name="Amazon Linux 2023 Arm64",
arch="arm64",
),
Linux(
slug="archlinux-lts", display_name="Arch Linux LTS", arch="x86_64"
),
Linux(slug="centos-7", display_name="CentOS 7", arch="x86_64"),
Linux(slug="debian-10", display_name="Debian 10", arch="x86_64"),
Linux(slug="debian-11", display_name="Debian 11", arch="x86_64"),
Linux(
slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64"
),
Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"),
Linux(
slug="debian-12-arm64", display_name="Debian 12 Arm64", arch="arm64"
),
Linux(slug="fedora-39", display_name="Fedora 39", arch="x86_64"),
Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"),
Linux(
slug="photonos-4",
display_name="Photon OS 4",
arch="x86_64",
fips=True,
),
Linux(
slug="photonos-4-arm64",
display_name="Photon OS 4 Arm64",
arch="arm64",
fips=True,
),
Linux(
slug="photonos-5",
display_name="Photon OS 5",
arch="x86_64",
fips=True,
),
Linux(
slug="photonos-5-arm64",
display_name="Photon OS 5 Arm64",
arch="arm64",
fips=True,
),
Linux(slug="ubuntu-20.04", display_name="Ubuntu 20.04", arch="x86_64"),
Linux(
slug="ubuntu-20.04-arm64",
display_name="Ubuntu 20.04 Arm64",
arch="arm64",
),
Linux(slug="ubuntu-22.04", display_name="Ubuntu 22.04", arch="x86_64"),
Linux(
slug="ubuntu-22.04-arm64",
display_name="Ubuntu 22.04 Arm64",
arch="arm64",
),
],
"macos": [
MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"),
MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"),
MacOS(
slug="macos-13-arm64",
display_name="macOS 13 Arm64",
arch="arm64",
runner="macos-13-xlarge",
),
],
"windows": [
Windows(slug="windows-2016", display_name="Windows 2016", arch="amd64"),
Windows(slug="windows-2019", display_name="Windows 2019", arch="amd64"),
Windows(slug="windows-2022", display_name="Windows 2022", arch="amd64"),
],
}
)
test_salt_pkg_listing = {
"linux": [
("almalinux-8", "Alma Linux 8", "x86_64", "rpm", "no-fips"),
("almalinux-8-arm64", "Alma Linux 8 Arm64", "arm64", "rpm", "no-fips"),
("almalinux-9", "Alma Linux 9", "x86_64", "rpm", "no-fips"),
("almalinux-9-arm64", "Alma Linux 9 Arm64", "arm64", "rpm", "no-fips"),
("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm", "no-fips"),
(
"amazonlinux-2-arm64",
"Amazon Linux 2 Arm64",
"arm64",
"rpm",
"no-fips",
),
("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm", "no-fips"),
(
"amazonlinux-2023-arm64",
"Amazon Linux 2023 Arm64",
"arm64",
"rpm",
"no-fips",
),
("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"),
("debian-10", "Debian 10", "x86_64", "deb", "no-fips"),
("debian-11", "Debian 11", "x86_64", "deb", "no-fips"),
("debian-11-arm64", "Debian 11 Arm64", "arm64", "deb", "no-fips"),
("debian-12", "Debian 12", "x86_64", "deb", "no-fips"),
("debian-12-arm64", "Debian 12 Arm64", "arm64", "deb", "no-fips"),
("photonos-4", "Photon OS 4", "x86_64", "rpm", "fips"),
("photonos-4-arm64", "Photon OS 4 Arm64", "arm64", "rpm", "fips"),
("photonos-5", "Photon OS 5", "x86_64", "rpm", "fips"),
("photonos-5-arm64", "Photon OS 5 Arm64", "arm64", "rpm", "fips"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb", "no-fips"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "arm64", "deb", "no-fips"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb", "no-fips"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "arm64", "deb", "no-fips"),
],
"macos": [
("macos-12", "macOS 12", "x86_64"),
("macos-13", "macOS 13", "x86_64"),
("macos-13-xlarge", "macOS 13 Arm64", "arm64"),
],
"windows": [
("windows-2016", "Windows 2016", "amd64"),
("windows-2019", "Windows 2019", "amd64"),
("windows-2022", "Windows 2022", "amd64"),
],
}
test_salt_pkg_listing = PlatformDefinitions(
{
"linux": [
Linux(
slug="almalinux-8",
display_name="Alma Linux 8",
arch="x86_64",
pkg_type="rpm",
),
Linux(
slug="almalinux-8-arm64",
display_name="Alma Linux 8 Arm64",
arch="arm64",
pkg_type="rpm",
),
Linux(
slug="almalinux-9",
display_name="Alma Linux 9",
arch="x86_64",
pkg_type="rpm",
),
Linux(
slug="almalinux-9-arm64",
display_name="Alma Linux 9 Arm64",
arch="arm64",
pkg_type="rpm",
),
Linux(
slug="amazonlinux-2",
display_name="Amazon Linux 2",
arch="x86_64",
pkg_type="rpm",
),
Linux(
slug="amazonlinux-2-arm64",
display_name="Amazon Linux 2 Arm64",
arch="arm64",
pkg_type="rpm",
),
Linux(
slug="amazonlinux-2023",
display_name="Amazon Linux 2023",
arch="x86_64",
pkg_type="rpm",
),
Linux(
slug="amazonlinux-2023-arm64",
display_name="Amazon Linux 2023 Arm64",
arch="arm64",
pkg_type="rpm",
),
Linux(
slug="centos-7",
display_name="CentOS 7",
arch="x86_64",
pkg_type="rpm",
),
Linux(
slug="debian-10",
display_name="Debian 10",
arch="x86_64",
pkg_type="deb",
),
Linux(
slug="debian-11",
display_name="Debian 11",
arch="x86_64",
pkg_type="deb",
),
Linux(
slug="debian-11-arm64",
display_name="Debian 11 Arm64",
arch="arm64",
pkg_type="deb",
),
Linux(
slug="debian-12",
display_name="Debian 12",
arch="x86_64",
pkg_type="deb",
),
Linux(
slug="debian-12-arm64",
display_name="Debian 12 Arm64",
arch="arm64",
pkg_type="deb",
),
Linux(
slug="photonos-4",
display_name="Photon OS 4",
arch="x86_64",
pkg_type="rpm",
fips=True,
),
Linux(
slug="photonos-4-arm64",
display_name="Photon OS 4 Arm64",
arch="arm64",
pkg_type="rpm",
fips=True,
),
Linux(
slug="photonos-5",
display_name="Photon OS 5",
arch="x86_64",
pkg_type="rpm",
fips=True,
),
Linux(
slug="photonos-5-arm64",
display_name="Photon OS 5 Arm64",
arch="arm64",
pkg_type="rpm",
fips=True,
),
Linux(
slug="ubuntu-20.04",
display_name="Ubuntu 20.04",
arch="x86_64",
pkg_type="deb",
),
Linux(
slug="ubuntu-20.04-arm64",
display_name="Ubuntu 20.04 Arm64",
arch="arm64",
pkg_type="deb",
),
Linux(
slug="ubuntu-22.04",
display_name="Ubuntu 22.04",
arch="x86_64",
pkg_type="deb",
),
Linux(
slug="ubuntu-22.04-arm64",
display_name="Ubuntu 22.04 Arm64",
arch="arm64",
pkg_type="deb",
),
],
"macos": [
MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"),
MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"),
MacOS(
slug="macos-13-arm64",
display_name="macOS 13 Arm64",
arch="arm64",
runner="macos-13-xlarge",
),
],
"windows": [
Windows(
slug="windows-2016",
display_name="Windows 2016",
arch="amd64",
pkg_type="NSIS",
),
Windows(
slug="windows-2016",
display_name="Windows 2016",
arch="amd64",
pkg_type="MSI",
),
Windows(
slug="windows-2019",
display_name="Windows 2019",
arch="amd64",
pkg_type="NSIS",
),
Windows(
slug="windows-2019",
display_name="Windows 2019",
arch="amd64",
pkg_type="MSI",
),
Windows(
slug="windows-2022",
display_name="Windows 2022",
arch="amd64",
pkg_type="NSIS",
),
Windows(
slug="windows-2022",
display_name="Windows 2022",
arch="amd64",
pkg_type="MSI",
),
],
}
)
build_ci_deps_listing = {
"linux": [
@ -193,17 +405,19 @@ def generate_workflows(ctx: Context):
],
"macos": [
("x86_64", "macos-12"),
("arm64", "macos-13-xlarge"),
("arm64", "macos-13-arm64"),
],
"windows": [
("amd64", "windows-2022"),
],
}
test_salt_pkg_downloads_listing: dict[str, list[tuple[str, str, str]]] = {
"linux": [],
"macos": [],
"windows": [],
}
test_salt_pkg_downloads_listing = PlatformDefinitions(
{
"linux": [],
"macos": [],
"windows": [],
}
)
rpm_slugs = (
"almalinux",
"amazonlinux",
@ -216,7 +430,7 @@ def generate_workflows(ctx: Context):
"opensuse-15",
"windows",
)
for slug in sorted(AMIS):
for slug in sorted(tools.utils.get_golden_images()):
if slug.startswith(linux_skip_pkg_download_tests):
continue
if "arm64" in slug:
@ -226,23 +440,62 @@ def generate_workflows(ctx: Context):
if slug.startswith(rpm_slugs) and arch == "arm64":
# While we maintain backwards compatible urls
test_salt_pkg_downloads_listing["linux"].append(
(slug, "aarch64", "package")
Linux(
slug=slug,
arch="aarch64",
pkg_type="package",
)
)
test_salt_pkg_downloads_listing["linux"].append((slug, arch, "package"))
test_salt_pkg_downloads_listing["linux"].append(
Linux(
slug=slug,
arch=arch,
pkg_type="package",
)
)
if slug.startswith("ubuntu-22"):
test_salt_pkg_downloads_listing["linux"].append((slug, arch, "onedir"))
for slug, display_name, arch in test_salt_listing["macos"]:
test_salt_pkg_downloads_listing["macos"].append((slug, arch, "package"))
for slug, display_name, arch in test_salt_listing["macos"][-1:]:
test_salt_pkg_downloads_listing["macos"].append((slug, arch, "onedir"))
for slug, display_name, arch in test_salt_listing["windows"][-1:]:
test_salt_pkg_downloads_listing["linux"].append(
Linux(
slug=slug,
arch=arch,
pkg_type="onedir",
)
)
for mac in test_salt_listing["macos"]:
test_salt_pkg_downloads_listing["macos"].append(
MacOS(
slug=mac.slug,
arch=mac.arch,
display_name=mac.display_name,
pkg_type="package",
runner=mac.runner,
)
)
for mac in test_salt_listing["macos"][-1:]:
test_salt_pkg_downloads_listing["macos"].append(
MacOS(
slug=mac.slug,
arch=mac.arch,
display_name=mac.display_name,
pkg_type="onedir",
runner=mac.runner,
)
)
for win in test_salt_listing["windows"][-1:]:
for pkg_type in ("nsis", "msi", "onedir"):
test_salt_pkg_downloads_listing["windows"].append((slug, arch, pkg_type))
test_salt_pkg_downloads_listing["windows"].append(
Windows(
slug=win.slug,
arch=win.arch,
display_name=win.display_name,
pkg_type=pkg_type,
)
)
test_salt_pkg_downloads_needs_slugs = set()
for platform in test_salt_pkg_downloads_listing:
for _, arch, _ in test_salt_pkg_downloads_listing[platform]:
test_salt_pkg_downloads_needs_slugs.add("build-ci-deps")
test_salt_pkg_downloads_needs_slugs = {"build-ci-deps"}
# for platform in test_salt_pkg_downloads_listing:
# for _, arch, _ in test_salt_pkg_downloads_listing[platform]:
# test_salt_pkg_downloads_needs_slugs.add("build-ci-deps")
build_rpms_listing = []
rpm_os_versions: dict[str, list[str]] = {
@ -251,7 +504,7 @@ def generate_workflows(ctx: Context):
"photon": [],
"redhat": [],
}
for slug in sorted(AMIS):
for slug in sorted(tools.utils.get_golden_images()):
if slug.endswith("-arm64"):
continue
if not slug.startswith(
@ -274,7 +527,7 @@ def generate_workflows(ctx: Context):
build_rpms_listing.append((distro, release, arch))
build_debs_listing = []
for slug in sorted(AMIS):
for slug in sorted(tools.utils.get_golden_images()):
if not slug.startswith(("debian-", "ubuntu-")):
continue
if slug.endswith("-arm64"):
@ -292,6 +545,7 @@ def generate_workflows(ctx: Context):
"jinja2.ext.do",
],
loader=FileSystemLoader(str(TEMPLATES)),
undefined=StrictUndefined,
)
for workflow_name, details in workflows.items():
if TYPE_CHECKING:
@ -304,12 +558,11 @@ def generate_workflows(ctx: Context):
f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from "
f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..."
)
workflow_slug = details.get("slug") or workflow_name.lower().replace(" ", "-")
context = {
"template": template_path.relative_to(tools.utils.REPO_ROOT),
"workflow_name": workflow_name,
"workflow_slug": (
details.get("slug") or workflow_name.lower().replace(" ", "-")
),
"workflow_slug": workflow_slug,
"includes": includes,
"conclusion_needs": NeedsTracker(),
"test_salt_needs": NeedsTracker(),
@ -327,9 +580,7 @@ def generate_workflows(ctx: Context):
"build_rpms_listing": build_rpms_listing,
"build_debs_listing": build_debs_listing,
}
shared_context = yaml.safe_load(
tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text()
)
shared_context = tools.utils.get_cicd_shared_context()
for key, value in shared_context.items():
context[key.replace("-", "_")] = value
loaded_template = env.get_template(template_path.name)

View file

@ -5,7 +5,6 @@ These commands are related to the test suite.
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import json
import logging
from typing import TYPE_CHECKING
@ -15,11 +14,6 @@ import tools.utils
import tools.utils.gh
from tools.utils import ExitCode
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open(
"r", encoding="utf-8"
) as rfh:
OS_SLUGS = sorted(json.load(rfh))
log = logging.getLogger(__name__)
# Define the command group
@ -57,7 +51,7 @@ ts = command_group(name="ts", help="Test Suite Related Commands", description=__
"slug": {
"help": "The OS slug",
"required": True,
"choices": OS_SLUGS,
"choices": sorted(tools.utils.get_golden_images()),
},
"pkg": {
"help": "Also download package test artifacts",

View file

@ -5,7 +5,6 @@ These commands are related to downloading test suite CI artifacts.
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import json
import logging
import pathlib
from typing import TYPE_CHECKING
@ -15,11 +14,6 @@ from ptscripts import Context, command_group
import tools.utils
import tools.utils.gh
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open(
"r", encoding="utf-8"
) as rfh:
OS_SLUGS = sorted(json.load(rfh))
log = logging.getLogger(__name__)
@ -135,7 +129,7 @@ def download_nox_artifact(
"slug": {
"help": "The OS slug",
"required": True,
"choices": OS_SLUGS,
"choices": sorted(tools.utils.get_golden_images()),
},
"repository": {
"help": "The repository to query, e.g. saltstack/salt",

View file

@ -1,22 +1,18 @@
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated,bad-whitespace
from __future__ import annotations
import fnmatch
import hashlib
import json
import os
import pathlib
import shutil
import sys
import tempfile
import zipfile
from datetime import datetime
from enum import IntEnum
from typing import Any
from functools import cache
import boto3
import attr
import packaging.version
from botocore.exceptions import ClientError
import yaml
from ptscripts import Context
from rich.progress import (
BarColumn,
@ -45,6 +41,36 @@ class ExitCode(IntEnum):
SOFT_FAIL = 2
@attr.s(frozen=True, slots=True)
class OS:
platform: str = attr.ib()
slug: str = attr.ib()
display_name: str = attr.ib(default=None)
arch: str = attr.ib(default=None)
pkg_type: str = attr.ib(default=None)
@attr.s(frozen=True, slots=True)
class Linux(OS):
platform: str = attr.ib(default="linux")
fips: bool = attr.ib(default=False)
@attr.s(frozen=True, slots=True)
class MacOS(OS):
runner: str = attr.ib()
platform: str = attr.ib(default="macos")
@runner.default
def _default_runner(self):
return self.slug
@attr.s(frozen=True, slots=True)
class Windows(OS):
platform: str = attr.ib(default="windows")
def create_progress_bar(file_progress: bool = False, **kwargs):
if file_progress:
return Progress(
@ -287,3 +313,23 @@ def get_platform_and_arch_from_slug(slug: str) -> tuple[str, str]:
else:
arch = "x86_64"
return platform, arch
@cache
def get_cicd_shared_context():
"""
Return the CI/CD shared context file contents.
"""
shared_context_file = REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml"
return yaml.safe_load(shared_context_file.read_text())
@cache
def get_golden_images():
"""
Return the golden images information stored on file.
"""
with REPO_ROOT.joinpath("cicd", "golden-images.json").open(
"r", encoding="utf-8"
) as rfh:
return json.load(rfh)

View file

@ -47,10 +47,6 @@ if TYPE_CHECKING:
log = logging.getLogger(__name__)
STATE_DIR = tools.utils.REPO_ROOT / ".vms-state"
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open(
"r", encoding="utf-8"
) as rfh:
AMIS = json.load(rfh)
REPO_CHECKOUT_ID = hashlib.sha256(
"|".join(list(platform.uname()) + [str(tools.utils.REPO_ROOT)]).encode()
).hexdigest()
@ -67,7 +63,7 @@ vm.add_argument("--region", help="The AWS region.", default=AWS_REGION)
"name": {
"help": "The VM Name",
"metavar": "VM_NAME",
"choices": list(AMIS),
"choices": sorted(tools.utils.get_golden_images()),
},
"key_name": {
"help": "The SSH key name. Will default to TOOLS_KEY_NAME in environment",
@ -791,10 +787,11 @@ class VM:
@config.default
def _config_default(self):
golden_images = tools.utils.get_golden_images()
config = AMIConfig(
**{
key: value
for (key, value) in AMIS[self.name].items()
for (key, value) in golden_images[self.name].items()
if key in AMIConfig.__annotations__
}
)