Merge branch 'master' into 18907_lazy_unmount_when_fails

This commit is contained in:
Gareth J. Greenaway 2023-10-24 15:59:35 -07:00 committed by GitHub
commit 9325ed9cba
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
74 changed files with 6054 additions and 1991 deletions

View file

@ -58,37 +58,37 @@ flags:
salt:
paths:
- salt/
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
carryforward: false # https://docs.codecov.io/docs/carryforward-flags
tests:
paths:
- tests/
carryforward: true
carryforward: false
pkg:
paths:
- pkg/tests
carryforward: true
carryforward: false
unit:
paths:
- tests/unit
- tests/pytests/unit
carryforward: true
carryforward: false
functional:
paths:
- tests/pytests/functional
carryforward: true
carryforward: false
scenarios:
paths:
- tests/pytests/scenarios
carryforward: true
carryforward: false
integration:
paths:
- tests/integration
- tests/pytests/integration
carryforward: true
carryforward: false
comment:
layout: "reach, diff, flags, files"
behavior: default # Comment posting behaviour
behavior: new # Comment posting behaviour
# default: update, if exists. Otherwise post new.
# once: update, if exists. Otherwise post new. Skip if deleted.
# new: delete old and post new.

View file

@ -121,12 +121,12 @@ jobs:
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox -e "pre-archive-cleanup(pkg=False)"
nox --force-color -e "pre-archive-cleanup(pkg=False)"
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox -e compress-dependencies -- ${{ inputs.distro-slug }}
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3

View file

@ -1124,6 +1124,23 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-3-arm64-ci-deps:
name: Photon OS 3 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-4-ci-deps:
name: Photon OS 4 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1158,6 +1175,40 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-ci-deps:
name: Photon OS 5 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-arm64-ci-deps:
name: Photon OS 5 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
ubuntu-2004-ci-deps:
name: Ubuntu 20.04 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1446,6 +1497,28 @@ jobs:
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-arm64-pkg-tests:
name: Photon OS 3 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1490,6 +1563,50 @@ jobs:
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-pkg-tests:
name: Photon OS 5 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-arm64-pkg-tests:
name: Photon OS 5 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2132,6 +2249,26 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
photonos-3-arm64:
name: Photon OS 3 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
photonos-4:
name: Photon OS 4 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2172,6 +2309,46 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
photonos-5:
name: Photon OS 5 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
photonos-5-arm64:
name: Photon OS 5 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}
ubuntu-2004:
name: Ubuntu 20.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2286,8 +2463,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -2312,8 +2492,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -2327,6 +2510,10 @@ jobs:
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
id: python-tools-scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Nox
run: |
python3 -m pip install 'nox==2022.8.7'
@ -2343,6 +2530,28 @@ jobs:
- name: Display structure of downloaded files
run: tree -a artifacts/
- name: Install Codecov CLI
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
mv ./codecov /usr/local/bin/
- name: Create XML Coverage Reports
run: |
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
- name: Combine Code Coverage
run: |
nox --force-color -e combine-coverage
@ -2445,8 +2654,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -2471,8 +2683,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -2487,8 +2702,11 @@ jobs:
- debian-12-pkg-tests
- debian-12-arm64-pkg-tests
- photonos-3-pkg-tests
- photonos-3-arm64-pkg-tests
- photonos-4-pkg-tests
- photonos-4-arm64-pkg-tests
- photonos-5-pkg-tests
- photonos-5-arm64-pkg-tests
- ubuntu-2004-pkg-tests
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests

View file

@ -1180,6 +1180,23 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-3-arm64-ci-deps:
name: Photon OS 3 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-4-ci-deps:
name: Photon OS 4 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1214,6 +1231,40 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-ci-deps:
name: Photon OS 5 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-arm64-ci-deps:
name: Photon OS 5 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
ubuntu-2004-ci-deps:
name: Ubuntu 20.04 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1502,6 +1553,28 @@ jobs:
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-arm64-pkg-tests:
name: Photon OS 3 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1546,6 +1619,50 @@ jobs:
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-pkg-tests:
name: Photon OS 5 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-arm64-pkg-tests:
name: Photon OS 5 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2188,6 +2305,26 @@ jobs:
skip-code-coverage: false
skip-junit-reports: false
photonos-3-arm64:
name: Photon OS 3 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
photonos-4:
name: Photon OS 4 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2228,6 +2365,46 @@ jobs:
skip-code-coverage: false
skip-junit-reports: false
photonos-5:
name: Photon OS 5 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
photonos-5-arm64:
name: Photon OS 5 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
ubuntu-2004:
name: Ubuntu 20.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2342,8 +2519,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -2368,8 +2548,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -2383,6 +2566,10 @@ jobs:
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
id: python-tools-scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Nox
run: |
python3 -m pip install 'nox==2022.8.7'
@ -2399,6 +2586,28 @@ jobs:
- name: Display structure of downloaded files
run: tree -a artifacts/
- name: Install Codecov CLI
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
mv ./codecov /usr/local/bin/
- name: Create XML Coverage Reports
run: |
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
- name: Combine Code Coverage
run: |
nox --force-color -e combine-coverage
@ -2762,6 +2971,14 @@ jobs:
distro: photon
version: "4"
arch: aarch64
- pkg-type: rpm
distro: photon
version: "5"
arch: x86_64
- pkg-type: rpm
distro: photon
version: "5"
arch: aarch64
steps:
- uses: actions/checkout@v4
@ -3184,8 +3401,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -3210,8 +3430,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -3287,8 +3510,11 @@ jobs:
- debian-12-pkg-tests
- debian-12-arm64-pkg-tests
- photonos-3-pkg-tests
- photonos-3-arm64-pkg-tests
- photonos-4-pkg-tests
- photonos-4-arm64-pkg-tests
- photonos-5-pkg-tests
- photonos-5-arm64-pkg-tests
- ubuntu-2004-pkg-tests
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests

View file

@ -610,6 +610,22 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-3-arm64-ci-deps:
name: Photon OS 3 Arm64 Deps
needs:
- prepare-workflow
- download-onedir-artifact
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-4-ci-deps:
name: Photon OS 4 Deps
needs:
@ -642,6 +658,38 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-ci-deps:
name: Photon OS 5 Deps
needs:
- prepare-workflow
- download-onedir-artifact
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-arm64-ci-deps:
name: Photon OS 5 Arm64 Deps
needs:
- prepare-workflow
- download-onedir-artifact
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
ubuntu-2004-ci-deps:
name: Ubuntu 20.04 Deps
needs:
@ -795,9 +843,12 @@ jobs:
- fedora-38-arm64-ci-deps
- fedora-38-ci-deps
- macos-12-ci-deps
- photonos-3-arm64-ci-deps
- photonos-3-ci-deps
- photonos-4-arm64-ci-deps
- photonos-4-ci-deps
- photonos-5-arm64-ci-deps
- photonos-5-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2204-arm64-ci-deps
@ -1018,8 +1069,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps

View file

@ -1158,6 +1158,23 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-3-arm64-ci-deps:
name: Photon OS 3 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-4-ci-deps:
name: Photon OS 4 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1192,6 +1209,40 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-ci-deps:
name: Photon OS 5 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-arm64-ci-deps:
name: Photon OS 5 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
ubuntu-2004-ci-deps:
name: Ubuntu 20.04 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1480,6 +1531,28 @@ jobs:
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-arm64-pkg-tests:
name: Photon OS 3 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1524,6 +1597,50 @@ jobs:
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-pkg-tests:
name: Photon OS 5 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-arm64-pkg-tests:
name: Photon OS 5 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2166,6 +2283,26 @@ jobs:
skip-code-coverage: false
skip-junit-reports: false
photonos-3-arm64:
name: Photon OS 3 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
photonos-4:
name: Photon OS 4 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2206,6 +2343,46 @@ jobs:
skip-code-coverage: false
skip-junit-reports: false
photonos-5:
name: Photon OS 5 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
photonos-5-arm64:
name: Photon OS 5 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: false
skip-junit-reports: false
ubuntu-2004:
name: Ubuntu 20.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2320,8 +2497,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -2346,8 +2526,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -2361,6 +2544,10 @@ jobs:
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
id: python-tools-scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Nox
run: |
python3 -m pip install 'nox==2022.8.7'
@ -2377,6 +2564,28 @@ jobs:
- name: Display structure of downloaded files
run: tree -a artifacts/
- name: Install Codecov CLI
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
mv ./codecov /usr/local/bin/
- name: Create XML Coverage Reports
run: |
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
- name: Combine Code Coverage
run: |
nox --force-color -e combine-coverage
@ -2481,8 +2690,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -2507,8 +2719,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -2523,8 +2738,11 @@ jobs:
- debian-12-pkg-tests
- debian-12-arm64-pkg-tests
- photonos-3-pkg-tests
- photonos-3-arm64-pkg-tests
- photonos-4-pkg-tests
- photonos-4-arm64-pkg-tests
- photonos-5-pkg-tests
- photonos-5-arm64-pkg-tests
- ubuntu-2004-pkg-tests
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests

View file

@ -1180,6 +1180,23 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-3-arm64-ci-deps:
name: Photon OS 3 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-4-ci-deps:
name: Photon OS 4 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1214,6 +1231,40 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-ci-deps:
name: Photon OS 5 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
photonos-5-arm64-ci-deps:
name: Photon OS 5 Arm64 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir-linux
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
ubuntu-2004-ci-deps:
name: Ubuntu 20.04 Deps
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1502,6 +1553,28 @@ jobs:
skip-junit-reports: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-arm64-pkg-tests:
name: Photon OS 3 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: true
skip-junit-reports: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1546,6 +1619,50 @@ jobs:
skip-junit-reports: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-pkg-tests:
name: Photon OS 5 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: true
skip-junit-reports: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-5-arm64-pkg-tests:
name: Photon OS 5 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-rpm-pkgs-onedir
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: true
skip-junit-reports: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2188,6 +2305,26 @@ jobs:
skip-code-coverage: true
skip-junit-reports: true
photonos-3-arm64:
name: Photon OS 3 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-3-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-3-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: true
skip-junit-reports: true
photonos-4:
name: Photon OS 4 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2228,6 +2365,46 @@ jobs:
skip-code-coverage: true
skip-junit-reports: true
photonos-5:
name: Photon OS 5 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: true
skip-junit-reports: true
photonos-5-arm64:
name: Photon OS 5 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- photonos-5-arm64-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-5-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
nox-version: 2022.8.7
python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
skip-code-coverage: true
skip-junit-reports: true
ubuntu-2004:
name: Ubuntu 20.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2619,6 +2796,14 @@ jobs:
distro: photon
version: "4"
arch: aarch64
- pkg-type: rpm
distro: photon
version: "5"
arch: x86_64
- pkg-type: rpm
distro: photon
version: "5"
arch: aarch64
steps:
- uses: actions/checkout@v4
@ -3143,9 +3328,12 @@ jobs:
- fedora-38-arm64-ci-deps
- fedora-38-ci-deps
- macos-12-ci-deps
- photonos-3-arm64-ci-deps
- photonos-3-ci-deps
- photonos-4-arm64-ci-deps
- photonos-4-ci-deps
- photonos-5-arm64-ci-deps
- photonos-5-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2204-arm64-ci-deps
@ -3200,8 +3388,11 @@ jobs:
- fedora-38-arm64-ci-deps
- opensuse-15-ci-deps
- photonos-3-ci-deps
- photonos-3-arm64-ci-deps
- photonos-4-ci-deps
- photonos-4-arm64-ci-deps
- photonos-5-ci-deps
- photonos-5-arm64-ci-deps
- ubuntu-2004-ci-deps
- ubuntu-2004-arm64-ci-deps
- ubuntu-2204-ci-deps
@ -3226,8 +3417,11 @@ jobs:
- fedora-38
- opensuse-15
- photonos-3
- photonos-3-arm64
- photonos-4
- photonos-4-arm64
- photonos-5
- photonos-5-arm64
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
@ -3242,8 +3436,11 @@ jobs:
- debian-12-pkg-tests
- debian-12-arm64-pkg-tests
- photonos-3-pkg-tests
- photonos-3-arm64-pkg-tests
- photonos-4-pkg-tests
- photonos-4-arm64-pkg-tests
- photonos-5-pkg-tests
- photonos-5-arm64-pkg-tests
- ubuntu-2004-pkg-tests
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests

View file

@ -22,6 +22,8 @@
("photon", "3", "aarch64"),
("photon", "4", "x86_64"),
("photon", "4", "aarch64"),
("photon", "5", "x86_64"),
("photon", "5", "aarch64"),
) %>
- pkg-type: rpm
distro: <{ distro }>

View file

@ -348,6 +348,10 @@
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
id: python-tools-scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Nox
run: |
python3 -m pip install 'nox==<{ nox_version }>'
@ -375,6 +379,28 @@
- name: Display structure of downloaded files
run: tree -a artifacts/
- name: Install Codecov CLI
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
mv ./codecov /usr/local/bin/
- name: Create XML Coverage Reports
run: |
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
- name: Combine Code Coverage
run: |
nox --force-color -e combine-coverage

View file

@ -1,4 +1,5 @@
<%- for slug, display_name, arch, pkg_type in test_salt_pkg_listing["linux"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<{ job_name }>:

View file

@ -90,9 +90,9 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
# Full test runs. Each chunk should never take more than 2 hours.
# Full test runs. Each chunk should never take more than 3 hours.
# Partial test runs(no chunk parallelization), 5 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 120 || 300 }}
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 180 || 300 }}
needs:
- generate-matrix
strategy:
@ -154,7 +154,7 @@ jobs:
- name: Decompress .nox Directory
run: |
nox -e decompress-dependencies -- ${{ inputs.distro-slug }}
nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }}
- name: Download testrun-changed-files.txt
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
@ -172,7 +172,7 @@ jobs:
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "1"
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin"
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin"
- name: Run Fast/Changed Tests
id: run-fast-changed-tests
@ -188,7 +188,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code \
--from-filenames=testrun-changed-files.txt
@ -206,7 +206,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests \
--from-filenames=testrun-changed-files.txt
@ -224,7 +224,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests \
--from-filenames=testrun-changed-files.txt
@ -242,7 +242,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code
- name: Run Slow Tests
@ -259,7 +259,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests
- name: Run Core Tests
@ -276,7 +276,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests
- name: Run Flaky Tests
@ -293,7 +293,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
- name: Run Full Tests
@ -310,7 +310,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
--slow-tests --core-tests -k "mac or darwin"
- name: Fix file ownership
@ -320,7 +320,7 @@ jobs:
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false
run: |
nox -e combine-coverage
nox --force-color -e combine-coverage
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
@ -403,85 +403,9 @@ jobs:
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
nox -e create-xml-coverage-reports
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/macos/codecov
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }} \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }} \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
nox --force-color -e create-xml-coverage-reports
mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
- name: Report Salt Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'

View file

@ -98,9 +98,9 @@ jobs:
- self-hosted
- linux
- bastion
# Full test runs. Each chunk should never take more than 2 hours.
# Full test runs. Each chunk should never take more than 3 hours.
# Partial test runs(no chunk parallelization), 5 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 120 || 300 }}
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 180 || 300 }}
needs:
- generate-matrix
strategy:
@ -350,85 +350,9 @@ jobs:
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
nox -e create-xml-coverage-reports
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }} \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }} \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
nox --force-color -e create-xml-coverage-reports
mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
- name: Report Salt Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'

View file

@ -131,12 +131,21 @@ jobs:
- distro-slug: photonos-3
arch: x86_64
pkg-type: package
- distro-slug: photonos-3-arm64
arch: aarch64
pkg-type: package
- distro-slug: photonos-4
arch: x86_64
pkg-type: package
- distro-slug: photonos-4-arm64
arch: aarch64
pkg-type: package
- distro-slug: photonos-5
arch: x86_64
pkg-type: package
- distro-slug: photonos-5-arm64
arch: aarch64
pkg-type: package
- distro-slug: ubuntu-20.04
arch: x86_64
pkg-type: package

1
changelog/44722.fixed.md Normal file
View file

@ -0,0 +1 @@
Allow `pip.install` to create a log file that is passed in if the parent directory is writeable

1
changelog/63583.fixed.md Normal file
View file

@ -0,0 +1 @@
Ensure that `__pillar__` is updated during a pillar refresh so that `process_beacons` has the updated beacons loaded from pillar.

1
changelog/64934.fixed.md Normal file
View file

@ -0,0 +1 @@
Add missing MySQL Grant SERVICE_CONNECTION_ADMIN to mysql module.

3
changelog/65008.added.md Normal file
View file

@ -0,0 +1,3 @@
Add support for show_jid to salt-run
Adds support for show_jid master config option to salt-run, so its behaviour matches the salt cli command.

1
changelog/65027.fixed.md Normal file
View file

@ -0,0 +1 @@
Ensure sync from _grains occurs before attempting pillar compilation in case custom grain used in pillar file

1
changelog/65372.fixed.md Normal file
View file

@ -0,0 +1 @@
Test mode for state function `saltmod.wheel` no longer sets `result` to `(None,)`

View file

@ -1 +1 @@
centosstream-9-x86_64: ami-09b72b340acb62c73
centosstream-9-x86_64: ami-0df4c4ee0d3a417e6

View file

@ -1,8 +1,8 @@
{
"almalinux-8-arm64": {
"ami": "ami-04c86a9990a3836b9",
"ami": "ami-01701198f23cc656f",
"ami_description": "CI Image of AlmaLinux 8 arm64",
"ami_name": "salt-project/ci/almalinux/8/arm64/20231005.1556",
"ami_name": "salt-project/ci/almalinux/8/arm64/20231019.0610",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -10,9 +10,9 @@
"ssh_username": "ec2-user"
},
"almalinux-8": {
"ami": "ami-059ed5c00c02c564b",
"ami": "ami-0d1fa37788a762561",
"ami_description": "CI Image of AlmaLinux 8 x86_64",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20231005.1557",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -20,9 +20,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9-arm64": {
"ami": "ami-0213f3e31656f7393",
"ami": "ami-0690d2b725982ad83",
"ami_description": "CI Image of AlmaLinux 9 arm64",
"ami_name": "salt-project/ci/almalinux/9/arm64/20231005.1557",
"ami_name": "salt-project/ci/almalinux/9/arm64/20231019.0610",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -30,9 +30,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9": {
"ami": "ami-0c4e36d63e728ee21",
"ami": "ami-0ffb222eea4b1c4ee",
"ami_description": "CI Image of AlmaLinux 9 x86_64",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20231005.1557",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -40,9 +40,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2-arm64": {
"ami": "ami-010d24ab23bfb0330",
"ami": "ami-0e9521385f61055a0",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20231005.1614",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20231019.0610",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -50,29 +50,29 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-0ad016fe17f923c6b",
"ami": "ami-038cc290cd0dd2fb3",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20231005.1614",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"amazonlinux-2023.1-arm64": {
"ami": "ami-037b7d6177ec8259d",
"ami_description": "CI Image of AmazonLinux 2023.1 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023.1/arm64/20231005.1555",
"amazonlinux-2023-arm64": {
"ami": "ami-00aadf98a51c60684",
"ami_description": "CI Image of AmazonLinux 2023 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20231019.0611",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"amazonlinux-2023.1": {
"ami": "ami-08e04f6dd44c858fa",
"ami_description": "CI Image of AmazonLinux 2023.1 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023.1/x86_64/20231005.1555",
"amazonlinux-2023": {
"ami": "ami-0aeb34a1da784672c",
"ami_description": "CI Image of AmazonLinux 2023 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20231019.0611",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -80,9 +80,9 @@
"ssh_username": "ec2-user"
},
"archlinux-lts": {
"ami": "ami-0b88ddfb321aff9ba",
"ami": "ami-0b4ab49118d17c567",
"ami_description": "CI Image of ArchLinux lts x86_64",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20231005.1555",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "false",
"instance_type": "t3a.large",
@ -90,9 +90,9 @@
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-01d5ee66081a02154",
"ami": "ami-0712b87973da8b106",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20231005.1617",
"ami_name": "salt-project/ci/centos/7/arm64/20231019.0611",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -100,9 +100,9 @@
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-020fcff1da1f72f27",
"ami": "ami-0432ac4d81ff9c6d7",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20231005.1616",
"ami_name": "salt-project/ci/centos/7/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -110,9 +110,9 @@
"ssh_username": "centos"
},
"centosstream-8-arm64": {
"ami": "ami-0ac6238b6506f7b8f",
"ami": "ami-00819771fc6d6f37a",
"ami_description": "CI Image of CentOSStream 8 arm64",
"ami_name": "salt-project/ci/centosstream/8/arm64/20231005.1614",
"ami_name": "salt-project/ci/centosstream/8/arm64/20231019.0610",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -120,9 +120,9 @@
"ssh_username": "centos"
},
"centosstream-8": {
"ami": "ami-0bfceb03d43d0ba0e",
"ami": "ami-00d0ebd1ad30509fc",
"ami_description": "CI Image of CentOSStream 8 x86_64",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20231005.1615",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -130,9 +130,9 @@
"ssh_username": "centos"
},
"centosstream-9-arm64": {
"ami": "ami-04db23ba9082a01bf",
"ami": "ami-08599182d0e9788f9",
"ami_description": "CI Image of CentOSStream 9 arm64",
"ami_name": "salt-project/ci/centosstream/9/arm64/20231005.1615",
"ami_name": "salt-project/ci/centosstream/9/arm64/20231019.0610",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -140,9 +140,9 @@
"ssh_username": "ec2-user"
},
"centosstream-9": {
"ami": "ami-0a47f4f785cb7a81c",
"ami": "ami-0df4c4ee0d3a417e6",
"ami_description": "CI Image of CentOSStream 9 x86_64",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20231005.1615",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -150,9 +150,9 @@
"ssh_username": "ec2-user"
},
"debian-10-arm64": {
"ami": "ami-015d30e48fa213528",
"ami": "ami-0be576b80116655d6",
"ami_description": "CI Image of Debian 10 arm64",
"ami_name": "salt-project/ci/debian/10/arm64/20231005.1601",
"ami_name": "salt-project/ci/debian/10/arm64/20231019.0611",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -160,9 +160,9 @@
"ssh_username": "admin"
},
"debian-10": {
"ami": "ami-0397043698fedfa4c",
"ami": "ami-0dc775a61113efde0",
"ami_description": "CI Image of Debian 10 x86_64",
"ami_name": "salt-project/ci/debian/10/x86_64/20231005.1606",
"ami_name": "salt-project/ci/debian/10/x86_64/20231019.0611",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -170,9 +170,9 @@
"ssh_username": "admin"
},
"debian-11-arm64": {
"ami": "ami-008dbab5525972174",
"ami": "ami-086e42800d155779f",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20231005.1607",
"ami_name": "salt-project/ci/debian/11/arm64/20231019.0611",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -180,9 +180,9 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-04fc56501daaf3c94",
"ami": "ami-01b730ce9083afb7b",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20231005.1607",
"ami_name": "salt-project/ci/debian/11/x86_64/20231019.0611",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -190,9 +190,9 @@
"ssh_username": "admin"
},
"debian-12-arm64": {
"ami": "ami-0956b73228a7368c3",
"ami": "ami-0a8fb0c54e8ac78c3",
"ami_description": "CI Image of Debian 12 arm64",
"ami_name": "salt-project/ci/debian/12/arm64/20231005.1610",
"ami_name": "salt-project/ci/debian/12/arm64/20231019.0611",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -200,9 +200,9 @@
"ssh_username": "admin"
},
"debian-12": {
"ami": "ami-0d0aa04bb5c49e54f",
"ami": "ami-09736ea89f5625680",
"ami_description": "CI Image of Debian 12 x86_64",
"ami_name": "salt-project/ci/debian/12/x86_64/20231005.1613",
"ami_name": "salt-project/ci/debian/12/x86_64/20231019.0611",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -210,9 +210,9 @@
"ssh_username": "admin"
},
"fedora-37-arm64": {
"ami": "ami-0201f64fda9f1ca6d",
"ami": "ami-067631a1bb1d3d6e4",
"ami_description": "CI Image of Fedora 37 arm64",
"ami_name": "salt-project/ci/fedora/37/arm64/20231005.1617",
"ami_name": "salt-project/ci/fedora/37/arm64/20231019.0630",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -220,9 +220,9 @@
"ssh_username": "fedora"
},
"fedora-37": {
"ami": "ami-02dfc80c8b14fd5bc",
"ami": "ami-03dab52e75c1d7594",
"ami_description": "CI Image of Fedora 37 x86_64",
"ami_name": "salt-project/ci/fedora/37/x86_64/20231005.1618",
"ami_name": "salt-project/ci/fedora/37/x86_64/20231019.0630",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -230,9 +230,9 @@
"ssh_username": "fedora"
},
"fedora-38-arm64": {
"ami": "ami-0b03c270c7f50165d",
"ami": "ami-0a67ad5dc0b4e67a9",
"ami_description": "CI Image of Fedora 38 arm64",
"ami_name": "salt-project/ci/fedora/38/arm64/20231005.1618",
"ami_name": "salt-project/ci/fedora/38/arm64/20231019.0630",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -240,9 +240,9 @@
"ssh_username": "fedora"
},
"fedora-38": {
"ami": "ami-0927a80620f670c23",
"ami": "ami-00e8299d247d3bfb9",
"ami_description": "CI Image of Fedora 38 x86_64",
"ami_name": "salt-project/ci/fedora/38/x86_64/20231005.1626",
"ami_name": "salt-project/ci/fedora/38/x86_64/20231019.0630",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -250,9 +250,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-0b51e3479fabb4078",
"ami": "ami-0fa4ce121739032e2",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20231005.1614",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20231019.0611",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -260,9 +260,9 @@
"ssh_username": "ec2-user"
},
"photonos-3-arm64": {
"ami": "ami-0a33037524874686c",
"ami": "ami-09687bbdca9322cfd",
"ami_description": "CI Image of PhotonOS 3 arm64",
"ami_name": "salt-project/ci/photonos/3/arm64/20231005.1558",
"ami_name": "salt-project/ci/photonos/3/arm64/20231019.0626",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -270,9 +270,9 @@
"ssh_username": "root"
},
"photonos-3": {
"ami": "ami-068c5c07aa91d84d1",
"ami": "ami-0e29021a535519231",
"ami_description": "CI Image of PhotonOS 3 x86_64",
"ami_name": "salt-project/ci/photonos/3/x86_64/20231005.1558",
"ami_name": "salt-project/ci/photonos/3/x86_64/20231019.0626",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -280,9 +280,9 @@
"ssh_username": "root"
},
"photonos-4-arm64": {
"ami": "ami-0f8c72854c5b5679c",
"ami": "ami-06a0418b67a9ec332",
"ami_description": "CI Image of PhotonOS 4 arm64",
"ami_name": "salt-project/ci/photonos/4/arm64/20231005.1558",
"ami_name": "salt-project/ci/photonos/4/arm64/20231019.0626",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -290,9 +290,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-04b8974b830b5adb0",
"ami": "ami-08ae023a2755a60dc",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20231005.1559",
"ami_name": "salt-project/ci/photonos/4/x86_64/20231019.0626",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -300,9 +300,9 @@
"ssh_username": "root"
},
"photonos-5-arm64": {
"ami": "ami-0f466b198cbcaf380",
"ami": "ami-05b3dd82b94e82736",
"ami_description": "CI Image of PhotonOS 5 arm64",
"ami_name": "salt-project/ci/photonos/5/arm64/20231005.1559",
"ami_name": "salt-project/ci/photonos/5/arm64/20231019.0627",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -310,9 +310,9 @@
"ssh_username": "root"
},
"photonos-5": {
"ami": "ami-01bb09f84464b243e",
"ami": "ami-016991d4c267732c3",
"ami_description": "CI Image of PhotonOS 5 x86_64",
"ami_name": "salt-project/ci/photonos/5/x86_64/20231005.1601",
"ami_name": "salt-project/ci/photonos/5/x86_64/20231019.0627",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -320,9 +320,9 @@
"ssh_username": "root"
},
"ubuntu-20.04-arm64": {
"ami": "ami-06d9a9e3b5ae369c7",
"ami": "ami-0dc851d4db96c052b",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20231005.1555",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20231019.0628",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -330,9 +330,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-080a55fb6cb08134d",
"ami": "ami-05c262fca2254d2cb",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20231005.1555",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20231019.0627",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -340,9 +340,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-0c87b8f0b8794f32e",
"ami": "ami-007415ef606318020",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20231005.1555",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20231019.0628",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -350,9 +350,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-0ce98043f227c9ac0",
"ami": "ami-04d01b95ca8570ed3",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20231005.1555",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20231019.0628",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -360,9 +360,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04-arm64": {
"ami": "ami-0519c583e36309fef",
"ami": "ami-0da01b22cca0f4281",
"ami_description": "CI Image of Ubuntu 23.04 arm64",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20231005.1555",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20231019.0629",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -370,9 +370,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04": {
"ami": "ami-063ad5dfb49f09182",
"ami": "ami-03e32d8e9ccc6cd6a",
"ami_description": "CI Image of Ubuntu 23.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20231005.1555",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20231019.0629",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -380,9 +380,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-0f1ac34593b8b044f",
"ami": "ami-02fd868528f2c7a62",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20231005.1615",
"ami_name": "salt-project/ci/windows/2016/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -390,9 +390,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-09100ff6a103a28ab",
"ami": "ami-0d6f2b5a109c98224",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20231005.1615",
"ami_name": "salt-project/ci/windows/2019/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -400,9 +400,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-0266dc6a12bc9fca6",
"ami": "ami-013e3141df4b2418f",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20231005.1616",
"ami_name": "salt-project/ci/windows/2022/x86_64/20231019.0610",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",

View file

@ -29,7 +29,7 @@
# One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'.
#
# The following log levels are considered INSECURE and may log sensitive data:
# ['garbage', 'trace', 'debug']
# ['profile', 'garbage', 'trace', 'debug', 'all']
#
# Default: 'info'
#

View file

@ -1198,7 +1198,7 @@
# One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'.
#
# The following log levels are considered INSECURE and may log sensitive data:
# ['garbage', 'trace', 'debug']
# ['profile', 'garbage', 'trace', 'debug', 'all']
#
#log_level: warning

View file

@ -809,7 +809,7 @@
# One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'.
#
# The following log levels are considered INSECURE and may log sensitive data:
# ['garbage', 'trace', 'debug']
# ['profile', 'garbage', 'trace', 'debug', 'all']
#
# Default: 'warning'
#log_level: warning

View file

@ -545,7 +545,7 @@
# One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'.
#
# The following log levels are considered INSECURE and may log sensitive data:
# ['garbage', 'trace', 'debug']
# ['profile', 'garbage', 'trace', 'debug', 'all']
#
# Default: 'warning'
#log_level: warning

View file

@ -61,6 +61,13 @@ available in salt are shown in the table below.
| all | 0 | Everything |
+----------+---------------+--------------------------------------------------------------------------+
Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes:
#. profile
#. debug
#. trace
#. garbage
#. all
Available Configuration Settings
================================

View file

@ -5446,6 +5446,13 @@ The level of messages to send to the console. See also :conf_log:`log_level`.
log_level: warning
Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes:
#. profile
#. debug
#. trace
#. garbage
#. all
.. conf_master:: log_level_logfile
``log_level_logfile``
@ -5461,6 +5468,13 @@ it will inherit the level set by :conf_log:`log_level` option.
log_level_logfile: warning
Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes:
#. profile
#. debug
#. trace
#. garbage
#. all
.. conf_master:: log_datefmt
``log_datefmt``

View file

@ -3307,6 +3307,12 @@ The level of messages to send to the console. See also :conf_log:`log_level`.
log_level: warning
Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes:
#. profile
#. debug
#. trace
#. garbage
#. all
.. conf_minion:: log_level_logfile
@ -3323,6 +3329,12 @@ it will inherit the level set by :conf_log:`log_level` option.
log_level_logfile: warning
Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes:
#. profile
#. debug
#. trace
#. garbage
#. all
.. conf_minion:: log_datefmt

View file

@ -1253,32 +1253,52 @@ def decompress_dependencies(session):
else:
scan_path = REPO_ROOT.joinpath(".nox", dirname, "Scripts")
script_paths = {str(p): p for p in os.scandir(scan_path)}
fixed_shebang = f"#!{scan_path / 'python'}"
for key in sorted(script_paths):
path = script_paths[key]
if not path.is_symlink():
if path.is_symlink():
broken_link = pathlib.Path(path)
resolved_link = os.readlink(path)
if not os.path.isabs(resolved_link):
# Relative symlinks, resolve them
resolved_link = os.path.join(scan_path, resolved_link)
if not os.path.exists(resolved_link):
session.log("The symlink %r looks to be broken", resolved_link)
# This is a broken link, fix it
resolved_link_suffix = resolved_link.split(
f"artifacts{os.sep}salt{os.sep}"
)[-1]
fixed_link = REPO_ROOT.joinpath(
"artifacts", "salt", resolved_link_suffix
)
session.log(
"Fixing broken symlink in nox virtualenv %r, from %r to %r",
dirname.name,
resolved_link,
str(fixed_link.relative_to(REPO_ROOT)),
)
broken_link.unlink()
broken_link.symlink_to(fixed_link)
continue
broken_link = pathlib.Path(path)
resolved_link = os.readlink(path)
if not os.path.isabs(resolved_link):
# Relative symlinks, resolve them
resolved_link = os.path.join(scan_path, resolved_link)
if not os.path.exists(resolved_link):
session.log("The symlink %r looks to be broken", resolved_link)
# This is a broken link, fix it
resolved_link_suffix = resolved_link.split(
f"artifacts{os.sep}salt{os.sep}"
)[-1]
fixed_link = REPO_ROOT.joinpath(
"artifacts", "salt", resolved_link_suffix
)
session.log(
"Fixing broken symlink in nox virtualenv %r, from %r to %r",
dirname.name,
resolved_link,
str(fixed_link.relative_to(REPO_ROOT)),
)
broken_link.unlink()
broken_link.symlink_to(fixed_link)
if not path.is_file():
continue
if "windows" not in distro_slug:
# Let's try to fix shebang's
try:
fpath = pathlib.Path(path)
contents = fpath.read_text().splitlines()
if (
contents[0].startswith("#!")
and contents[0].endswith("python")
and contents[0] != fixed_shebang
):
session.log(
"Fixing broken shebang in %r",
str(fpath.relative_to(REPO_ROOT)),
)
fpath.write_text("\n".join([fixed_shebang] + contents[1:]))
except UnicodeDecodeError:
pass
@nox.session(python=False, name="compress-dependencies")
@ -1916,7 +1936,7 @@ def ci_test_onedir_pkgs(session):
# Install requirements
if _upgrade_pip_setuptools_and_wheel(session, onedir=True):
_install_requirements(session, "zeromq")
_install_requirements(session, "pyzmq")
env = {
"ONEDIR_TESTRUN": "1",
"PKG_TEST_TYPE": chunk,

View file

@ -11,9 +11,21 @@ def test_services(install_salt, salt_cli, salt_minion):
if install_salt.distro_id in ("ubuntu", "debian"):
services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"]
services_disabled = []
elif install_salt.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"):
elif install_salt.distro_id in ("centos", "redhat", "amzn", "fedora"):
services_enabled = []
services_disabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"]
elif install_salt.distro_id == "photon":
if float(install_salt.distro_version) < 5:
services_enabled = []
services_disabled = [
"salt-master",
"salt-minion",
"salt-syndic",
"salt-api",
]
else:
services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"]
services_disabled = []
elif platform.is_darwin():
services_enabled = ["salt-minion"]
services_disabled = []
@ -21,17 +33,9 @@ def test_services(install_salt, salt_cli, salt_minion):
pytest.fail(f"Don't know how to handle os_family={install_salt.distro_id}")
for service in services_enabled:
assert (
"true"
in salt_cli.run(
"service.enabled", service, minion_tgt=salt_minion.id
).stdout
)
ret = salt_cli.run("service.enabled", service, minion_tgt=salt_minion.id)
assert "true" in ret.stdout
for service in services_disabled:
assert (
"true"
in salt_cli.run(
"service.disabled", service, minion_tgt=salt_minion.id
).stdout
)
ret = salt_cli.run("service.disabled", service, minion_tgt=salt_minion.id)
assert "true" in ret.stdout

View file

@ -24,6 +24,7 @@ def test_system_config(salt_cli, salt_minion):
"Fedora Linux-36",
"VMware Photon OS-3",
"VMware Photon OS-4",
"VMware Photon OS-5",
):
ret = subprocess.call(
"systemctl show -p ${config} salt-minion.service", shell=True

View file

@ -12,7 +12,7 @@ import warnings
if sys.platform.startswith("win"):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
if sys.version_info < (3,):
if sys.version_info < (3,): # pragma: no cover
sys.stderr.write(
"\n\nAfter the Sodium release, 3001, Salt no longer supports Python 2. Exiting.\n\n"
)

View file

@ -41,12 +41,16 @@ log = logging.getLogger(__name__)
MAX_FILENAME_LENGTH = 255
def get_file_client(opts, pillar=False):
def get_file_client(opts, pillar=False, force_local=False):
"""
Read in the ``file_client`` option and return the correct type of file
server
"""
client = opts.get("file_client", "remote")
if force_local:
client = "local"
else:
client = opts.get("file_client", "remote")
if pillar and client == "local":
client = "pillar"
return {"remote": RemoteClient, "local": FSClient, "pillar": PillarClient}.get(

View file

@ -46,6 +46,7 @@ import salt.utils.dictdiffer
import salt.utils.dictupdate
import salt.utils.error
import salt.utils.event
import salt.utils.extmods
import salt.utils.files
import salt.utils.jid
import salt.utils.minion
@ -114,6 +115,29 @@ log = logging.getLogger(__name__)
# 6. Handle publications
def _sync_grains(opts):
    """
    Sync custom grain modules before the first grains/pillar load.

    Custom grains may be referenced while compiling pillar, so they must be
    synced down before the minion renders pillar for the first time.
    """
    # salt.utils.extmods.sync() reads extmod_whitelist/extmod_blacklist from
    # opts; those keys are not always present, so default them to empty dicts.
    for list_opt in ("extmod_whitelist", "extmod_blacklist"):
        if opts.get(list_opt, None) is None:
            opts[list_opt] = {}
    # On an initial start with a remote file client, opts["master_uri"] is not
    # populated yet; syncing through the remote client would raise while
    # retrieving the key for master communication. Force a local sync then.
    if opts.get("file_client", "remote") == "remote" and not opts.get(
        "master_uri", None
    ):
        salt.utils.extmods.sync(opts, "grains", force_local=True)
    else:
        salt.utils.extmods.sync(opts, "grains")
def resolve_dns(opts, fallback=True):
"""
Resolves the master_ip and master_uri options
@ -921,6 +945,7 @@ class SMinion(MinionBase):
# Late setup of the opts grains, so we can log from the grains module
import salt.loader
_sync_grains(opts)
opts["grains"] = salt.loader.grains(opts)
super().__init__(opts)
@ -2544,6 +2569,7 @@ class Minion(MinionBase):
current_schedule, new_schedule
)
self.opts["pillar"] = new_pillar
self.functions.pack["__pillar__"] = self.opts["pillar"]
finally:
async_pillar.destroy()
self.matchers_refresh()
@ -3934,6 +3960,8 @@ class SProxyMinion(SMinion):
salt '*' sys.reload_modules
"""
# need sync of custom grains as may be used in pillar compilation
salt.utils.extmods.sync(self.opts, "grains")
self.opts["grains"] = salt.loader.grains(self.opts)
self.opts["pillar"] = salt.pillar.get_pillar(
self.opts,

View file

@ -122,6 +122,7 @@ __grants__ = [
"SELECT",
"SET USER", # MariaDB since 10.5.2
"SET_USER_ID",
"SERVICE_CONNECTION_ADMIN", # MySQL since 8.0.14
"SHOW DATABASES",
"SHOW VIEW",
"SHUTDOWN",
@ -264,7 +265,7 @@ def __virtual__():
def __mysql_hash_password(password):
    """
    Hash a clear-text password the way MySQL's legacy ``PASSWORD()``
    function (mysql_native_password) does:
    ``"*" + HEX(SHA1(SHA1(password)))``.

    password
        The clear-text password to hash.

    Returns the 41-character upper-case hash string (e.g. ``*2470C0C0...``).
    """
    # Fix: the block previously carried both the old ``"*{}".format(...)``
    # line and its f-string replacement; the stale second assignment re-hashed
    # the already-formatted *string*, and hashlib.sha1() on a str raises
    # TypeError. Keep only the single double-SHA1 formatting step.
    _password = hashlib.sha1(password.encode()).digest()
    _password = f"*{hashlib.sha1(_password).hexdigest().upper()}"
    return _password
@ -276,7 +277,7 @@ def __check_table(name, table, **connection_args):
s_name = quote_identifier(name)
s_table = quote_identifier(table)
# identifiers cannot be used as values
qry = "CHECK TABLE {}.{}".format(s_name, s_table)
qry = f"CHECK TABLE {s_name}.{s_table}"
_execute(cur, qry)
results = cur.fetchall()
log.debug(results)
@ -291,7 +292,7 @@ def __repair_table(name, table, **connection_args):
s_name = quote_identifier(name)
s_table = quote_identifier(table)
# identifiers cannot be used as values
qry = "REPAIR TABLE {}.{}".format(s_name, s_table)
qry = f"REPAIR TABLE {s_name}.{s_table}"
_execute(cur, qry)
results = cur.fetchall()
log.debug(results)
@ -306,7 +307,7 @@ def __optimize_table(name, table, **connection_args):
s_name = quote_identifier(name)
s_table = quote_identifier(table)
# identifiers cannot be used as values
qry = "OPTIMIZE TABLE {}.{}".format(s_name, s_table)
qry = f"OPTIMIZE TABLE {s_name}.{s_table}"
_execute(cur, qry)
results = cur.fetchall()
log.debug(results)
@ -387,7 +388,7 @@ def _connect(**kwargs):
name = name[len(prefix) :]
except IndexError:
return
val = __salt__["config.option"]("mysql.{}".format(name), None)
val = __salt__["config.option"](f"mysql.{name}", None)
if val is not None:
connargs[key] = val
@ -582,7 +583,7 @@ def _grant_to_tokens(grant):
if not column:
current_grant = token
else:
token = "{}.{}".format(current_grant, token)
token = f"{current_grant}.{token}"
grant_tokens.append(token)
else: # This is a multi-word, ala LOCK TABLES
multiword_statement.append(token)
@ -1250,7 +1251,7 @@ def db_tables(name, **connection_args):
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = "SHOW TABLES IN {}".format(s_name)
qry = f"SHOW TABLES IN {s_name}"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
@ -1329,7 +1330,7 @@ def db_create(name, character_set=None, collate=None, **connection_args):
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = "CREATE DATABASE IF NOT EXISTS {}".format(s_name)
qry = f"CREATE DATABASE IF NOT EXISTS {s_name}"
args = {}
if character_set is not None:
qry += " CHARACTER SET %(character_set)s"
@ -1376,7 +1377,7 @@ def db_remove(name, **connection_args):
cur = dbc.cursor()
s_name = quote_identifier(name)
# identifiers cannot be used as values
qry = "DROP DATABASE {};".format(s_name)
qry = f"DROP DATABASE {s_name};"
try:
_execute(cur, qry)
except MySQLdb.OperationalError as exc:
@ -1430,7 +1431,7 @@ def _mysql_user_exists(
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
@ -1475,7 +1476,7 @@ def _mariadb_user_exists(
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
qry = "SELECT User,Host FROM mysql.user WHERE User = %(user)s AND Host = %(host)s"
@ -1507,7 +1508,7 @@ def user_exists(
passwordless=False,
unix_socket=False,
password_column=None,
**connection_args
**connection_args,
):
"""
Checks if a user exists on the MySQL server. A login can be checked to see
@ -1552,7 +1553,7 @@ def user_exists(
if (
dbc is None
and __context__["mysql.error"].startswith(
"MySQL Error 1045: Access denied for user '{}'@".format(user)
f"MySQL Error 1045: Access denied for user '{user}'@"
)
and password
):
@ -1579,7 +1580,7 @@ def user_exists(
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args
**connection_args,
)
else:
qry, args = _mysql_user_exists(
@ -1591,7 +1592,7 @@ def user_exists(
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args
**connection_args,
)
try:
@ -1646,7 +1647,7 @@ def _mysql_user_create(
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
@ -1709,7 +1710,7 @@ def _mariadb_user_create(
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
qry = "CREATE USER %(user)s@%(host)s"
@ -1755,7 +1756,7 @@ def user_create(
unix_socket=False,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
"""
Creates a MySQL user
@ -1845,7 +1846,7 @@ def user_create(
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args
**connection_args,
)
else:
qry, args = _mysql_user_create(
@ -1857,7 +1858,7 @@ def user_create(
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args
**connection_args,
)
if isinstance(qry, bool):
@ -1877,9 +1878,9 @@ def user_create(
password,
password_hash,
password_column=password_column,
**connection_args
**connection_args,
):
msg = "User '{}'@'{}' has been created".format(user, host)
msg = f"User '{user}'@'{host}' has been created"
if not any((password, password_hash)):
msg += " with passwordless login"
log.info(msg)
@ -1898,7 +1899,7 @@ def _mysql_user_chpass(
unix_socket=None,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
compare_version = "8.0.11"
@ -1984,7 +1985,7 @@ def _mariadb_user_chpass(
unix_socket=None,
password_column=None,
auth_plugin="mysql_native_password",
**connection_args
**connection_args,
):
server_version = salt.utils.data.decode(version(**connection_args))
@ -2055,7 +2056,7 @@ def user_chpass(
allow_passwordless=False,
unix_socket=None,
password_column=None,
**connection_args
**connection_args,
):
"""
Change password for a MySQL user
@ -2140,7 +2141,7 @@ def user_chpass(
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args
**connection_args,
)
else:
qry, args = _mysql_user_chpass(
@ -2152,7 +2153,7 @@ def user_chpass(
unix_socket,
password_column=password_column,
auth_plugin=auth_plugin,
**connection_args
**connection_args,
)
try:
@ -2330,7 +2331,7 @@ def __grant_normalize(grant):
exploded_grants = __grant_split(grant)
for chkgrant, _ in exploded_grants:
if chkgrant.strip().upper() not in __grants__:
raise Exception("Invalid grant : '{}'".format(chkgrant))
raise Exception(f"Invalid grant : '{chkgrant}'")
return grant
@ -2350,7 +2351,7 @@ def __ssl_option_sanitize(ssl_option):
normal_key = key.strip().upper()
if normal_key not in __ssl_options__:
raise Exception("Invalid SSL option : '{}'".format(key))
raise Exception(f"Invalid SSL option : '{key}'")
if normal_key in __ssl_options_parameterized__:
# SSL option parameters (cipher, issuer, subject) are pasted directly to SQL so
@ -2398,7 +2399,7 @@ def __grant_generate(
if table != "*":
table = quote_identifier(table)
# identifiers cannot be used as values, and same thing for grants
qry = "GRANT {} ON {}.{} TO %(user)s@%(host)s".format(grant, dbc, table)
qry = f"GRANT {grant} ON {dbc}.{table} TO %(user)s@%(host)s"
args = {}
args["user"] = user
args["host"] = host
@ -2445,7 +2446,7 @@ def user_grants(user, host="localhost", **connection_args):
for grant in results:
tmp = grant[0].split(" IDENTIFIED BY")[0]
if "WITH GRANT OPTION" in grant[0] and "WITH GRANT OPTION" not in tmp:
tmp = "{} WITH GRANT OPTION".format(tmp)
tmp = f"{tmp} WITH GRANT OPTION"
ret.append(tmp)
log.debug(ret)
return ret
@ -2458,7 +2459,7 @@ def grant_exists(
host="localhost",
grant_option=False,
escape=True,
**connection_args
**connection_args,
):
"""
Checks to see if a grant exists in the database
@ -2579,7 +2580,7 @@ def grant_add(
grant_option=False,
escape=True,
ssl_option=False,
**connection_args
**connection_args,
):
"""
Adds a grant to the MySQL server.
@ -2635,7 +2636,7 @@ def grant_revoke(
host="localhost",
grant_option=False,
escape=True,
**connection_args
**connection_args,
):
"""
Removes a grant from the MySQL server.
@ -2672,7 +2673,7 @@ def grant_revoke(
if table != "*":
table = quote_identifier(table)
# identifiers cannot be used as values, same thing for grants
qry = "REVOKE {} ON {}.{} FROM %(user)s@%(host)s;".format(grant, s_database, table)
qry = f"REVOKE {grant} ON {s_database}.{table} FROM %(user)s@%(host)s;"
args = {}
args["user"] = user
args["host"] = host
@ -3037,12 +3038,12 @@ def plugin_add(name, soname=None, **connection_args):
if dbc is None:
return False
cur = dbc.cursor()
qry = "INSTALL PLUGIN {}".format(name)
qry = f"INSTALL PLUGIN {name}"
if soname:
qry += ' SONAME "{}"'.format(soname)
qry += f' SONAME "{soname}"'
else:
qry += ' SONAME "{}.so"'.format(name)
qry += f' SONAME "{name}.so"'
try:
_execute(cur, qry)
@ -3077,7 +3078,7 @@ def plugin_remove(name, **connection_args):
if dbc is None:
return False
cur = dbc.cursor()
qry = "UNINSTALL PLUGIN {}".format(name)
qry = f"UNINSTALL PLUGIN {name}"
args = {}
args["name"] = name

View file

@ -500,7 +500,9 @@ def install(
or one or more package names with commas between them
log
Log file where a complete (maximum verbosity) record will be kept
Log file where a complete (maximum verbosity) record will be kept.
If this file doesn't exist and the parent directory is writeable,
it will be created.
proxy
Specify a proxy in the form ``user:passwd@proxy.server:port``. Note
@ -755,6 +757,16 @@ def install(
if log:
if os.path.isdir(log):
raise OSError(f"'{log}' is a directory. Use --log path_to_file")
if not os.path.exists(log):
parent = os.path.dirname(log)
if not os.path.exists(parent):
raise OSError(
f"Trying to create '{log}' but parent directory '{parent}' does not exist."
)
elif not os.access(parent, os.W_OK):
raise OSError(
f"Trying to create '{log}' but parent directory '{parent}' is not writeable."
)
elif not os.access(log, os.W_OK):
raise OSError(f"'{log}' is not writeable")

View file

@ -288,6 +288,9 @@ class Runner(RunnerClient):
return async_pub["jid"] # return the jid
# otherwise run it in the main process
if self.opts.get("show_jid"):
print(f"jid: {self.jid}")
if self.opts.get("eauth"):
ret = self.cmd_sync(low)
if isinstance(ret, dict) and set(ret) == {"data", "outputter"}:

View file

@ -1041,7 +1041,7 @@ def wheel(name, **kwargs):
jid = None
if __opts__.get("test", False):
ret["result"] = (None,)
ret["result"] = None
ret["changes"] = {}
ret["comment"] = f"Wheel function '{name}' would be executed."
return ret

View file

@ -32,7 +32,14 @@ def _listdir_recursively(rootdir):
return file_list
def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None):
def sync(
opts,
form,
saltenv=None,
extmod_whitelist=None,
extmod_blacklist=None,
force_local=False,
):
"""
Sync custom modules into the extension_modules directory
"""
@ -62,7 +69,7 @@ def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None)
ret = []
remote = set()
source = salt.utils.url.create("_" + form)
mod_dir = os.path.join(opts["extension_modules"], "{}".format(form))
mod_dir = os.path.join(opts["extension_modules"], f"{form}")
touched = False
with salt.utils.files.set_umask(0o077):
try:
@ -75,7 +82,9 @@ def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None)
"Cannot create cache module directory %s. Check permissions.",
mod_dir,
)
with salt.fileclient.get_file_client(opts) as fileclient:
with salt.fileclient.get_file_client(
opts, pillar=False, force_local=force_local
) as fileclient:
for sub_env in saltenv:
log.info("Syncing %s for environment '%s'", form, sub_env)
cache = []
@ -91,7 +100,7 @@ def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None)
)
)
local_cache_dir = os.path.join(
opts["cachedir"], "files", sub_env, "_{}".format(form)
opts["cachedir"], "files", sub_env, f"_{form}"
)
log.debug("Local cache dir: '%s'", local_cache_dir)
for fn_ in cache:
@ -120,13 +129,13 @@ def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None)
if src_digest != dst_digest:
# The downloaded file differs, replace!
shutil.copyfile(fn_, dest)
ret.append("{}.{}".format(form, relname))
ret.append(f"{form}.{relname}")
else:
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(fn_, dest)
ret.append("{}.{}".format(form, relname))
ret.append(f"{form}.{relname}")
touched = bool(ret)
if opts["clean_dynamic_modules"] is True:

View file

@ -41,7 +41,7 @@ import salt.utils.yaml
import salt.version as version
from salt.defaults import DEFAULT_TARGET_DELIM
from salt.utils.validate.path import is_writeable
from salt.utils.verify import verify_log, verify_log_files
from salt.utils.verify import insecure_log, verify_log, verify_log_files
log = logging.getLogger(__name__)
@ -610,9 +610,11 @@ class LogLevelMixIn(metaclass=MixInMeta):
*self._console_log_level_cli_flags,
dest=self._loglevel_config_setting_name_,
choices=list(salt._logging.LOG_LEVELS),
help="Console logging log level. One of {}. Default: '{}'.".format(
help="Console logging log level. One of {}. Default: '{}'. \n "
"The following log levels are INSECURE and may log sensitive data: {}".format(
", ".join([f"'{n}'" for n in salt._logging.SORTED_LEVEL_NAMES]),
self._default_logging_level_,
", ".join(insecure_log()),
),
)
@ -636,9 +638,11 @@ class LogLevelMixIn(metaclass=MixInMeta):
"--log-file-level",
dest=self._logfile_loglevel_config_setting_name_,
choices=list(salt._logging.SORTED_LEVEL_NAMES),
help="Logfile logging log level. One of {}. Default: '{}'.".format(
help="Logfile logging log level. One of {}. Default: '{}'. \n "
"The following log levels are INSECURE and may log sensitive data: {}".format(
", ".join([f"'{n}'" for n in salt._logging.SORTED_LEVEL_NAMES]),
self._default_logging_level_,
", ".join(insecure_log()),
),
)
self._mixin_after_parsed_funcs.append(self.__setup_logging_routines)

View file

@ -572,6 +572,17 @@ def safe_py_code(code):
return True
def insecure_log():
    """
    Return the names of the log levels considered insecure.

    A level is insecure when its numeric value is below ``info`` —
    such levels may end up logging sensitive data.
    """
    info_value = LOG_LEVELS.get("info", 20)
    return [name for name, numeric in LOG_LEVELS.items() if numeric < info_value]
def verify_log(opts):
"""
If an insecre logging configuration is found, show a warning

View file

@ -1419,13 +1419,15 @@ def sshd_server(salt_factories, sshd_config_dir, salt_master, grains):
"/usr/libexec/openssh/sftp-server",
# Arch Linux
"/usr/lib/ssh/sftp-server",
# Photon OS 5
"/usr/libexec/sftp-server",
]
sftp_server_path = None
for path in sftp_server_paths:
if os.path.exists(path):
sftp_server_path = path
if sftp_server_path is None:
log.warning(f"Failed to find 'sftp-server'. Searched: {sftp_server_paths}")
pytest.fail(f"Failed to find 'sftp-server'. Searched: {sftp_server_paths}")
else:
sshd_config_dict["Subsystem"] = f"sftp {sftp_server_path}"
factory = salt_factories.get_sshd_daemon(

View file

@ -1,570 +0,0 @@
import os
import pprint
import re
import shutil
import tempfile
import pytest
import salt.utils.files
import salt.utils.path
import salt.utils.platform
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from tests.support.case import ModuleCase
from tests.support.helpers import VirtualEnv, patched_environ
from tests.support.runtests import RUNTIME_VARS
@pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False)
@pytest.mark.windows_whitelisted
class PipModuleTest(ModuleCase):
def setUp(self):
super().setUp()
self.venv_test_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
# Remove the venv test directory
self.addCleanup(shutil.rmtree, self.venv_test_dir, ignore_errors=True)
self.venv_dir = os.path.join(self.venv_test_dir, "venv")
self.patched_environ = patched_environ(
PIP_SOURCE_DIR="",
PIP_BUILD_DIR="",
__cleanup__=[k for k in os.environ if k.startswith("PIP_")],
)
self.patched_environ.__enter__()
self.addCleanup(self.patched_environ.__exit__)
def _check_download_error(self, ret):
"""
Checks to see if a download error looks transitory
"""
return any(w in ret for w in ["URLError", "Download error"])
def pip_successful_install(
self,
target,
expect=(
"irc3-plugins-test",
"pep8",
),
):
"""
isolate regex for extracting `successful install` message from pip
"""
expect = set(expect)
expect_str = "|".join(expect)
success = re.search(
r"^.*Successfully installed\s([^\n]+)(?:Clean.*)?", target, re.M | re.S
)
success_for = (
re.findall(r"({})(?:-(?:[\d\.-]))?".format(expect_str), success.groups()[0])
if success
else []
)
return expect.issubset(set(success_for))
@pytest.mark.slow_test
def test_issue_2087_missing_pip(self):
# Let's create the testing virtualenv
with VirtualEnv(self.venv_dir):
# Let's remove the pip binary
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
site_dir = self.run_function(
"virtualenv.get_distribution_path", [self.venv_dir, "pip"]
)
if salt.utils.platform.is_windows():
pip_bin = os.path.join(self.venv_dir, "Scripts", "pip.exe")
site_dir = os.path.join(self.venv_dir, "lib", "site-packages")
if not os.path.isfile(pip_bin):
self.skipTest("Failed to find the pip binary to the test virtualenv")
os.remove(pip_bin)
# Also remove the pip dir from site-packages
# This is needed now that we're using python -m pip instead of the
# pip binary directly. python -m pip will still work even if the
# pip binary is missing
shutil.rmtree(os.path.join(site_dir, "pip"))
# Let's run a pip depending functions
for func in ("pip.freeze", "pip.list"):
ret = self.run_function(func, bin_env=self.venv_dir)
assert (
"Command required for '{}' not found: Could not find a `pip` binary".format(
func
)
in ret
)
@pytest.mark.slow_test
def test_requirements_as_list_of_chains__cwd_set__absolute_file_path(self):
with VirtualEnv(self.venv_dir):
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements1b.txt\n")
with salt.utils.files.fopen(req1b_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("-r requirements2b.txt\n")
with salt.utils.files.fopen(req2b_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install",
requirements=requirements_list,
bin_env=self.venv_dir,
cwd=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
found = self.pip_successful_install(ret["stdout"])
assert found
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path(self):
with VirtualEnv(self.venv_dir):
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements1b.txt\n")
with salt.utils.files.fopen(req1b_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("-r requirements2b.txt\n")
with salt.utils.files.fopen(req2b_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
found = self.pip_successful_install(ret["stdout"])
assert found
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_requirements_as_list__absolute_file_path(self):
with VirtualEnv(self.venv_dir):
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
found = self.pip_successful_install(ret["stdout"])
assert found
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_requirements_as_list__non_absolute_file_path(self):
with VirtualEnv(self.venv_dir):
# Create a requirements file that depends on another one.
req1_filename = "requirements.txt"
req2_filename = "requirements2.txt"
req_cwd = self.venv_dir
req1_filepath = os.path.join(req_cwd, req1_filename)
req2_filepath = os.path.join(req_cwd, req2_filename)
with salt.utils.files.fopen(req1_filepath, "w") as f:
f.write("irc3-plugins-test\n")
with salt.utils.files.fopen(req2_filepath, "w") as f:
f.write("pep8\n")
requirements_list = [req1_filename, req2_filename]
ret = self.run_function(
"pip.install",
requirements=requirements_list,
bin_env=self.venv_dir,
cwd=req_cwd,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
found = self.pip_successful_install(ret["stdout"])
assert found
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_chained_requirements__absolute_file_path(self):
with VirtualEnv(self.venv_dir):
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install", requirements=req1_filename, bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
assert "installed pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_chained_requirements__non_absolute_file_path(self):
with VirtualEnv(self.venv_dir):
# Create a requirements file that depends on another one.
req_basepath = self.venv_dir
req1_filename = "requirements.txt"
req2_filename = "requirements2.txt"
req1_file = os.path.join(self.venv_dir, req1_filename)
req2_file = os.path.join(self.venv_dir, req2_filename)
with salt.utils.files.fopen(req1_file, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_file, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install",
requirements=req1_filename,
cwd=req_basepath,
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
assert "installed pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_issue_4805_nested_requirements(self):
with VirtualEnv(self.venv_dir):
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
with salt.utils.files.fopen(req1_filename, "w") as f:
f.write("-r requirements2.txt")
with salt.utils.files.fopen(req2_filename, "w") as f:
f.write("pep8")
ret = self.run_function(
"pip.install",
requirements=req1_filename,
bin_env=self.venv_dir,
timeout=300,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
assert ret["retcode"] == 0
assert "installed pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_pip_uninstall(self):
# Let's create the testing virtualenv
with VirtualEnv(self.venv_dir):
ret = self.run_function("pip.install", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
assert ret["retcode"] == 0
assert "installed pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.uninstall' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
assert "uninstalled pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_pip_install_upgrade(self):
# Create the testing virtualenv
with VirtualEnv(self.venv_dir):
ret = self.run_function(
"pip.install", ["pep8==1.3.4"], bin_env=self.venv_dir
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
assert ret["retcode"] == 0
assert "installed pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
ret = self.run_function(
"pip.install", ["pep8"], bin_env=self.venv_dir, upgrade=True
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
assert ret["retcode"] == 0
assert "installed pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
if not isinstance(ret, dict):
self.fail(
"The 'pip.uninstall' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
assert ret["retcode"] == 0
assert "uninstalled pep8" in ret["stdout"]
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_pip_install_multiple_editables(self):
editables = [
"git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
# Create the testing virtualenv
with VirtualEnv(self.venv_dir):
ret = self.run_function(
"pip.install",
[],
editable="{}".format(",".join(editables)),
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
assert ret["retcode"] == 0
for package in ("iStr", "SaltTesting"):
match = re.search(
r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(
package
),
ret["stdout"],
)
assert match is not None
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.slow_test
def test_pip_install_multiple_editables_and_pkgs(self):
editables = [
"git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
# Create the testing virtualenv
with VirtualEnv(self.venv_dir):
ret = self.run_function(
"pip.install",
["pep8"],
editable="{}".format(",".join(editables)),
bin_env=self.venv_dir,
)
if not isinstance(ret, dict):
self.fail(
"The 'pip.install' command did not return the excepted dictionary."
" Output:\n{}".format(ret)
)
try:
if self._check_download_error(ret["stdout"]):
self.skipTest("Test skipped due to pip download error")
assert ret["retcode"] == 0
for package in ("iStr", "SaltTesting", "pep8"):
match = re.search(
r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(
package
),
ret["stdout"],
)
assert match is not None
except KeyError as exc:
self.fail(
"The returned dictionary is missing an expected key. Error: '{}'."
" Dictionary: {}".format(exc, pprint.pformat(ret))
)
@pytest.mark.skipif(
shutil.which("/bin/pip3") is None, reason="Could not find /bin/pip3"
)
@pytest.mark.skip_on_windows(reason="test specific for linux usage of /bin/python")
@pytest.mark.skip_initial_gh_actions_failure(
reason="This was skipped on older golden images and is failing on newer."
)
def test_system_pip3(self):
self.run_function(
"pip.install", pkgs=["lazyimport==0.0.1"], bin_env="/bin/pip3"
)
ret1 = self.run_function("cmd.run_all", ["/bin/pip3 freeze | grep lazyimport"])
assert "lazyimport==0.0.1" in ret1["stdout"]
self.run_function("pip.uninstall", pkgs=["lazyimport"], bin_env="/bin/pip3")
ret2 = self.run_function("cmd.run_all", ["/bin/pip3 freeze | grep lazyimport"])
assert ret2["stdout"] == ""

View file

@ -90,3 +90,18 @@ def test_versions_report(salt_cli):
assert "relenv" in ret_dict["Dependency Versions"]
assert "Salt Extensions" in ret_dict
assert "salt-analytics-framework" in ret_dict["Salt Extensions"]
def test_help_log(salt_cli):
    """
    Ensure that ``--help`` output includes the insecure log level warning —
    once for the console log option and once for the logfile option.
    """
    result = salt_cli.run("--help")
    warning_lines = [
        line.strip()
        for line in result.stdout.split("\n")
        if "sensitive data:" in line
    ]
    expected = "sensitive data: all, debug, garbage, profile, trace"
    for line in warning_lines:
        assert line == expected
    assert len(warning_lines) == 2

View file

@ -0,0 +1,42 @@
"""
Tests for salt-run with show_jid
"""
import logging
import re
import pytest
log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def salt_master(salt_factories):
    """
    Yield a running Salt master configured with ``show_jid: True``.
    """
    defaults = {"show_jid": True}
    master = salt_factories.salt_master_daemon(
        "salt-run-show-jid-master", defaults=defaults
    )
    with master.started():
        yield master
@pytest.fixture(scope="module")
def salt_run_cli(salt_master):
    """
    Provide a ``salt-run`` CLI wrapper bound to the running master.
    """
    assert salt_master.is_running()
    cli = salt_master.salt_run_cli(timeout=30)
    return cli
def test_salt_run_show_jid(salt_run_cli):
    """
    With ``show_jid: True`` the runner output must start with ``jid: <digits>``.
    """
    result = salt_run_cli.run("test.stdout_print")
    assert re.match(r"jid: \d+", result.stdout)

View file

@ -46,7 +46,10 @@ def PKG_TARGETS(grains):
_PKG_TARGETS = ["lynx", "gnuplot"]
elif grains["os_family"] == "RedHat":
if grains["os"] == "VMware Photon OS":
_PKG_TARGETS = ["wget", "zsh-html"]
if grains["osmajorrelease"] >= 5:
_PKG_TARGETS = ["wget", "zsh"]
else:
_PKG_TARGETS = ["wget", "zsh-html"]
elif (
grains["os"] in ("CentOS Stream", "AlmaLinux")
and grains["osmajorrelease"] == 9
@ -73,7 +76,7 @@ def PKG_CAP_TARGETS(grains):
@pytest.fixture
def PKG_32_TARGETS(grains):
_PKG_32_TARGETS = []
if grains["os_family"] == "RedHat":
if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon":
if grains["os"] == "CentOS":
_PKG_32_TARGETS.append("xz-devel.i686")
if not _PKG_32_TARGETS:
@ -84,7 +87,7 @@ def PKG_32_TARGETS(grains):
@pytest.fixture
def PKG_DOT_TARGETS(grains):
_PKG_DOT_TARGETS = []
if grains["os_family"] == "RedHat":
if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon":
if grains["osmajorrelease"] == 7:
_PKG_DOT_TARGETS = ["tomcat-el-2.2-api"]
elif grains["osmajorrelease"] == 8:
@ -99,7 +102,7 @@ def PKG_DOT_TARGETS(grains):
@pytest.fixture
def PKG_EPOCH_TARGETS(grains):
_PKG_EPOCH_TARGETS = []
if grains["os_family"] == "RedHat":
if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon":
if grains["osmajorrelease"] == 7:
_PKG_EPOCH_TARGETS = ["comps-extras"]
elif grains["osmajorrelease"] == 8:

View file

@ -40,7 +40,7 @@ def test_fib_txt_output(salt_call_cli):
@pytest.mark.parametrize("indent", [-1, 0, 1])
def test_json_out_indent(salt_call_cli, indent):
ret = salt_call_cli.run("--out=json", "--out-indent={}".format(indent), "test.ping")
ret = salt_call_cli.run("--out=json", f"--out-indent={indent}", "test.ping")
assert ret.returncode == 0
assert ret.data is True
if indent == -1:
@ -164,7 +164,7 @@ def test_issue_14979_output_file_permissions(salt_call_cli):
try:
stat1 = output_file.stat()
except OSError:
pytest.fail("Failed to generate output file {}".format(output_file))
pytest.fail(f"Failed to generate output file {output_file}")
# Let's change umask
os.umask(0o777) # pylint: disable=blacklisted-function
@ -187,7 +187,7 @@ def test_issue_14979_output_file_permissions(salt_call_cli):
try:
stat3 = output_file.stat()
except OSError:
pytest.fail("Failed to generate output file {}".format(output_file))
pytest.fail(f"Failed to generate output file {output_file}")
# Mode must have changed since we're creating a new log file
assert stat1.st_mode != stat3.st_mode
@ -290,7 +290,7 @@ def test_syslog_file_not_found(salt_minion, salt_call_cli, tmp_path):
with salt.utils.files.fopen(str(config_dir / "minion"), "w") as fh_:
fh_.write(salt.utils.yaml.dump(minion_config, default_flow_style=False))
ret = salt_call_cli.run(
"--config-dir={}".format(config_dir),
f"--config-dir={config_dir}",
"--log-level=debug",
"cmd.run",
"echo foo",
@ -429,3 +429,74 @@ def test_local_salt_call_no_function_no_retcode(salt_call_cli):
assert "test" in ret.data
assert ret.data["test"] == "'test' is not available."
assert "test.echo" in ret.data
def test_state_highstate_custom_grains(salt_master, salt_minion_factory):
    """
    This test ensure that custom grains in salt://_grains are loaded before pillar compilation
    to ensure that any use of custom grains in pillar files are available, this implies that
    a sync of grains occurs before loading the regular /etc/salt/grains or configuration file
    grains, as well as the usual grains.

    Note: cannot use salt_minion and salt_call_cli, since these will be loaded before
    the pillar and custom_grains files are written, hence using salt_minion_factory.
    """
    # Pillar top file: assign the ``defaults`` pillar to every minion.
    pillar_top_sls = """
    base:
      '*':
        - defaults
    """

    # Pillar that renders a value from the custom grain; this only renders
    # correctly if the custom grain module is synced *before* pillar compilation.
    pillar_defaults_sls = """
    mypillar: "{{ grains['custom_grain'] }}"
    """

    salt_top_sls = """
    base:
      '*':
        - test
    """

    # Minimal no-op state so ``state.highstate`` has something to apply.
    salt_test_sls = """
    "donothing":
      test.nop: []
    """

    # Custom grain module dropped under salt://_grains.
    salt_custom_grains_py = """
    def main():
        return {'custom_grain': 'test_value'}
    """
    assert salt_master.is_running()
    with salt_minion_factory.started():
        salt_minion = salt_minion_factory
        salt_call_cli = salt_minion_factory.salt_call_cli()
        with salt_minion.pillar_tree.base.temp_file(
            "top.sls", pillar_top_sls
        ), salt_minion.pillar_tree.base.temp_file(
            "defaults.sls", pillar_defaults_sls
        ), salt_minion.state_tree.base.temp_file(
            "top.sls", salt_top_sls
        ), salt_minion.state_tree.base.temp_file(
            "test.sls", salt_test_sls
        ), salt_minion.state_tree.base.temp_file(
            "_grains/custom_grain.py", salt_custom_grains_py
        ):
            # Highstate exercises the grains-sync-before-pillar path.
            ret = salt_call_cli.run("--local", "state.highstate")
            assert ret.returncode == 0
            ret = salt_call_cli.run("--local", "pillar.items")
            assert ret.returncode == 0
            assert ret.data
            pillar_items = ret.data
            # The pillar value must have been rendered from the custom grain.
            assert "mypillar" in pillar_items
            assert pillar_items["mypillar"] == "test_value"
def test_salt_call_versions(salt_call_cli, caplog):
    """
    Run ``test.versions`` without ``--local`` so grains are synced from the
    master, and verify the sync does not fail on a missing ``master_uri`` key.
    """
    with caplog.at_level(logging.DEBUG):
        result = salt_call_cli.run("test.versions")
        assert result.returncode == 0
        assert "Failed to sync grains module: 'master_uri'" not in caplog.messages

View file

@ -1,9 +1,14 @@
import logging
import pathlib
import time
import types
import attr
import pytest
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.slow_test,
pytest.mark.windows_whitelisted,
@ -210,7 +215,7 @@ class PillarRefresh:
"top.sls", top_file_contents
)
self.minion_1_pillar = self.master.pillar_tree.base.temp_file(
"minion-1-pillar.sls", "{}: true".format(self.pillar_key)
"minion-1-pillar.sls", f"{self.pillar_key}: true"
)
self.top_file.__enter__()
self.minion_1_pillar.__enter__()
@ -588,3 +593,106 @@ def test_pillar_ext_59975(salt_call_cli):
"""
ret = salt_call_cli.run("pillar.ext", '{"libvert": _}')
assert "ext_pillar_opts" in ret.data
@pytest.fixture
def event_listerner_timeout(grains):
    """
    Per-platform event listener timeouts (seconds): ``catch`` for events we
    expect to observe, ``miss`` for events we expect to be absent.
    """
    catch, miss = 60, 10
    if grains["os"] == "Windows":
        if grains["osrelease"].startswith("2019"):
            catch, miss = 120, 30
        else:
            catch, miss = 90, 10
    return types.SimpleNamespace(catch=catch, miss=miss)
@pytest.mark.slow_test
def test_pillar_refresh_pillar_beacons(
    base_env_pillar_tree_root_dir,
    salt_cli,
    salt_minion,
    salt_master,
    event_listener,
    event_listerner_timeout,
):
    """
    Ensure beacons jobs in pillar are started after
    a pillar refresh and then not running when pillar
    is cleared.
    """
    top_sls = """
    base:
      '{}':
        - test_beacons
    """.format(
        salt_minion.id
    )

    test_beacons_sls_empty = ""
    test_beacons_sls = """
    beacons:
      status:
        - loadavg:
          - 1-min
    """
    assert salt_minion.is_running()

    top_tempfile = pytest.helpers.temp_file(
        "top.sls", top_sls, base_env_pillar_tree_root_dir
    )
    beacon_tempfile = pytest.helpers.temp_file(
        "test_beacons.sls", test_beacons_sls_empty, base_env_pillar_tree_root_dir
    )

    with top_tempfile, beacon_tempfile:
        # Calling refresh_pillar to update in-memory pillars
        salt_cli.run("saltutil.refresh_pillar", wait=True, minion_tgt=salt_minion.id)

        # Ensure beacons start when pillar is refreshed
        with salt_master.pillar_tree.base.temp_file(
            "test_beacons.sls", test_beacons_sls
        ):
            # Calling refresh_pillar to update in-memory pillars
            salt_cli.run(
                "saltutil.refresh_pillar", wait=True, minion_tgt=salt_minion.id
            )

            # Give the beacons a chance to start
            time.sleep(5)

            # Plain string literal: the tag has no interpolated values (was an
            # f-string with no placeholders, flagged by lint rule F541).
            event_tag = "salt/beacon/*/status/*"
            start_time = time.time()

            event_pattern = (salt_master.id, event_tag)
            matched_events = event_listener.wait_for_events(
                [event_pattern],
                after_time=start_time,
                timeout=event_listerner_timeout.catch,
            )

            assert matched_events.found_all_events

        # Ensure beacons stop when pillar is refreshed
        with salt_master.pillar_tree.base.temp_file(
            "test_beacons.sls", test_beacons_sls_empty
        ):
            # Calling refresh_pillar to update in-memory pillars
            salt_cli.run(
                "saltutil.refresh_pillar", wait=True, minion_tgt=salt_minion.id
            )

            # Give the beacons a chance to stop
            time.sleep(5)

            event_tag = "salt/beacon/*/status/*"
            start_time = time.time()

            event_pattern = (salt_master.id, event_tag)
            matched_events = event_listener.wait_for_events(
                [event_pattern],
                after_time=start_time,
                timeout=event_listerner_timeout.miss,
            )

            assert not matched_events.found_all_events

View file

@ -0,0 +1,651 @@
import os
import pprint
import re
import shutil
import pytest
import salt.utils.files
import salt.utils.path
import salt.utils.platform
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from tests.support.helpers import VirtualEnv, patched_environ
pytestmark = [
pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False),
pytest.mark.windows_whitelisted,
]
@pytest.fixture(autouse=True)
def patch_environment():
    """
    Neutralize the caller's pip configuration for every test: blank the
    source/build dir variables and hand all remaining ``PIP_*`` keys to
    ``patched_environ`` via ``__cleanup__`` (presumably to remove them —
    see tests.support.helpers for the exact semantics).
    """
    with patched_environ(
        PIP_SOURCE_DIR="",
        PIP_BUILD_DIR="",
        __cleanup__=[k for k in os.environ if k.startswith("PIP_")],
    ):
        yield
@pytest.fixture
def venv_dir(tmp_path):
    """Per-test path (as ``str``) under which a virtualenv is created."""
    venv_path = tmp_path / "venv_dir"
    return str(venv_path)
def _check_download_error(ret):
"""
Checks to see if a download error looks transitory
"""
return any(w in ret for w in ["URLError", "Download error"])
def _pip_successful_install(
target,
expect=(
"irc3-plugins-test",
"pep8",
),
):
"""
isolate regex for extracting `successful install` message from pip
"""
expect = set(expect)
expect_str = "|".join(expect)
success = re.search(
r"^.*Successfully installed\s([^\n]+)(?:Clean.*)?", target, re.M | re.S
)
success_for = (
re.findall(rf"({expect_str})(?:-(?:[\d\.-]))?", success.groups()[0])
if success
else []
)
return expect.issubset(set(success_for))
@pytest.mark.slow_test
def test_issue_2087_missing_pip(venv_dir, salt_cli, salt_minion):
    """
    pip.freeze / pip.list must report a clear "Could not find a `pip` binary"
    error instead of crashing when the virtualenv's pip binary and package
    have been removed (issue #2087).
    """
    # Let's create the testing virtualenv
    with VirtualEnv(venv_dir):
        # Let's remove the pip binary
        pip_bin = os.path.join(venv_dir, "bin", "pip")
        site_dir = salt_cli.run(
            "virtualenv.get_distribution_path",
            venv_dir,
            "pip",
            minion_tgt=salt_minion.id,
        ).data
        if salt.utils.platform.is_windows():
            pip_bin = os.path.join(venv_dir, "Scripts", "pip.exe")
            site_dir = os.path.join(venv_dir, "lib", "site-packages")
        if not os.path.isfile(pip_bin):
            pytest.skip("Failed to find the pip binary to the test virtualenv")
        os.remove(pip_bin)
        # Also remove the pip dir from site-packages
        # This is needed now that we're using python -m pip instead of the
        # pip binary directly. python -m pip will still work even if the
        # pip binary is missing
        shutil.rmtree(os.path.join(site_dir, "pip"))
        # Let's run a pip depending functions
        for func in ("pip.freeze", "pip.list"):
            ret = salt_cli.run(func, bin_env=venv_dir, minion_tgt=salt_minion.id).data
            assert (
                "Command required for '{}' not found: Could not find a `pip` binary".format(
                    func
                )
                in ret
            )
@pytest.mark.slow_test
def test_requirements_as_list_of_chains__cwd_set__absolute_file_path(
    venv_dir, salt_cli, salt_minion
):
    """
    pip.install with a list of requirements files (absolute paths), each of
    which ``-r``-includes a second file, and ``cwd`` set: every package from
    the chained files must be installed.
    """
    with VirtualEnv(venv_dir):
        # Create a requirements file that depends on another one.
        req1_filename = os.path.join(venv_dir, "requirements1.txt")
        req1b_filename = os.path.join(venv_dir, "requirements1b.txt")
        req2_filename = os.path.join(venv_dir, "requirements2.txt")
        req2b_filename = os.path.join(venv_dir, "requirements2b.txt")
        with salt.utils.files.fopen(req1_filename, "w") as f:
            f.write("-r requirements1b.txt\n")
        with salt.utils.files.fopen(req1b_filename, "w") as f:
            f.write("irc3-plugins-test\n")
        with salt.utils.files.fopen(req2_filename, "w") as f:
            f.write("-r requirements2b.txt\n")
        with salt.utils.files.fopen(req2b_filename, "w") as f:
            f.write("pep8\n")
        requirements_list = [req1_filename, req2_filename]
        ret = salt_cli.run(
            "pip.install",
            requirements=requirements_list,
            bin_env=venv_dir,
            cwd=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            found = _pip_successful_install(ret.stdout)
            assert found
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path(
    venv_dir, salt_cli, salt_minion
):
    """
    Same as the ``cwd_set`` variant but without passing ``cwd``: chained
    requirements given as absolute paths must still resolve and install.
    """
    with VirtualEnv(venv_dir):
        # Create a requirements file that depends on another one.
        req1_filename = os.path.join(venv_dir, "requirements1.txt")
        req1b_filename = os.path.join(venv_dir, "requirements1b.txt")
        req2_filename = os.path.join(venv_dir, "requirements2.txt")
        req2b_filename = os.path.join(venv_dir, "requirements2b.txt")
        with salt.utils.files.fopen(req1_filename, "w") as f:
            f.write("-r requirements1b.txt\n")
        with salt.utils.files.fopen(req1b_filename, "w") as f:
            f.write("irc3-plugins-test\n")
        with salt.utils.files.fopen(req2_filename, "w") as f:
            f.write("-r requirements2b.txt\n")
        with salt.utils.files.fopen(req2b_filename, "w") as f:
            f.write("pep8\n")
        requirements_list = [req1_filename, req2_filename]
        ret = salt_cli.run(
            "pip.install",
            requirements=requirements_list,
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            found = _pip_successful_install(ret.stdout)
            assert found
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_requirements_as_list__absolute_file_path(venv_dir, salt_cli, salt_minion):
    """
    pip.install with a plain list of (non-chained) requirements files given
    as absolute paths installs the packages from both files.
    """
    with VirtualEnv(venv_dir):
        req1_filename = os.path.join(venv_dir, "requirements.txt")
        req2_filename = os.path.join(venv_dir, "requirements2.txt")
        with salt.utils.files.fopen(req1_filename, "w") as f:
            f.write("irc3-plugins-test\n")
        with salt.utils.files.fopen(req2_filename, "w") as f:
            f.write("pep8\n")
        requirements_list = [req1_filename, req2_filename]
        ret = salt_cli.run(
            "pip.install",
            requirements=requirements_list,
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            found = _pip_successful_install(ret.stdout)
            assert found
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_requirements_as_list__non_absolute_file_path(venv_dir, salt_cli, salt_minion):
    """
    pip.install with requirements file names given relative to an explicit
    ``cwd`` must resolve them against that directory and install.
    """
    with VirtualEnv(venv_dir):
        # Create a requirements file that depends on another one.
        req1_filename = "requirements.txt"
        req2_filename = "requirements2.txt"
        req_cwd = venv_dir
        req1_filepath = os.path.join(req_cwd, req1_filename)
        req2_filepath = os.path.join(req_cwd, req2_filename)
        with salt.utils.files.fopen(req1_filepath, "w") as f:
            f.write("irc3-plugins-test\n")
        with salt.utils.files.fopen(req2_filepath, "w") as f:
            f.write("pep8\n")
        requirements_list = [req1_filename, req2_filename]
        ret = salt_cli.run(
            "pip.install",
            f"cwd={req_cwd}",
            requirements=requirements_list,
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            found = _pip_successful_install(ret.stdout)
            assert found
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_chained_requirements__absolute_file_path(venv_dir, salt_cli, salt_minion):
    """
    A single requirements file (absolute path) that ``-r``-includes another
    must pull in the package from the included file.
    """
    with VirtualEnv(venv_dir):
        # Create a requirements file that depends on another one.
        req1_filename = os.path.join(venv_dir, "requirements.txt")
        req2_filename = os.path.join(venv_dir, "requirements2.txt")
        with salt.utils.files.fopen(req1_filename, "w") as f:
            f.write("-r requirements2.txt")
        with salt.utils.files.fopen(req2_filename, "w") as f:
            f.write("pep8")
        ret = salt_cli.run(
            "pip.install",
            requirements=req1_filename,
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            assert "installed pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_chained_requirements__non_absolute_file_path(venv_dir, salt_cli, salt_minion):
    """
    A chained requirements file given by name relative to an explicit ``cwd``
    must resolve its ``-r`` include and install the package.
    """
    with VirtualEnv(venv_dir):
        # Create a requirements file that depends on another one.
        req_basepath = venv_dir
        req1_filename = "requirements.txt"
        req2_filename = "requirements2.txt"
        req1_file = os.path.join(venv_dir, req1_filename)
        req2_file = os.path.join(venv_dir, req2_filename)
        with salt.utils.files.fopen(req1_file, "w") as f:
            f.write("-r requirements2.txt")
        with salt.utils.files.fopen(req2_file, "w") as f:
            f.write("pep8")
        ret = salt_cli.run(
            "pip.install",
            f"cwd={req_basepath}",
            requirements=req1_filename,
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            assert "installed pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_issue_4805_nested_requirements(venv_dir, salt_cli, salt_minion):
    """
    Regression test for issue #4805: nested (``-r``-included) requirements
    files must install cleanly; transient download failures skip the test.
    """
    with VirtualEnv(venv_dir):
        # Create a requirements file that depends on another one.
        req1_filename = os.path.join(venv_dir, "requirements.txt")
        req2_filename = os.path.join(venv_dir, "requirements2.txt")
        with salt.utils.files.fopen(req1_filename, "w") as f:
            f.write("-r requirements2.txt")
        with salt.utils.files.fopen(req2_filename, "w") as f:
            f.write("pep8")
        ret = salt_cli.run(
            "pip.install",
            requirements=req1_filename,
            bin_env=venv_dir,
            timeout=300,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            if _check_download_error(ret.stdout):
                pytest.skip("Test skipped due to pip download error")
            assert ret.returncode == 0
            assert "installed pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_pip_uninstall(venv_dir, salt_cli, salt_minion):
    """
    pip.install followed by pip.uninstall round-trips: pep8 installs
    successfully and is then reported as uninstalled.
    """
    # Let's create the testing virtualenv
    with VirtualEnv(venv_dir):
        ret = salt_cli.run(
            "pip.install", ["pep8"], bin_env=venv_dir, minion_tgt=salt_minion.id
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            if _check_download_error(ret.stdout):
                pytest.skip("Test skipped due to pip download error")
            assert ret.returncode == 0
            assert "installed pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
        ret = salt_cli.run(
            "pip.uninstall", ["pep8"], bin_env=venv_dir, minion_tgt=salt_minion.id
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.uninstall' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            assert "uninstalled pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_pip_install_upgrade(venv_dir, salt_cli, salt_minion):
    """
    Install a pinned version of pep8, upgrade it with ``upgrade=True``, then
    uninstall it; each step must report success.
    """
    # Create the testing virtualenv
    with VirtualEnv(venv_dir):
        ret = salt_cli.run(
            "pip.install", "pep8==1.3.4", bin_env=venv_dir, minion_tgt=salt_minion.id
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            if _check_download_error(ret.stdout):
                pytest.skip("Test skipped due to pip download error")
            assert ret.returncode == 0
            assert "installed pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
        ret = salt_cli.run(
            "pip.install",
            "pep8",
            bin_env=venv_dir,
            upgrade=True,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            if _check_download_error(ret.stdout):
                pytest.skip("Test skipped due to pip download error")
            assert ret.returncode == 0
            assert "installed pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
        ret = salt_cli.run(
            "pip.uninstall", "pep8", bin_env=venv_dir, minion_tgt=salt_minion.id
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.uninstall' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            assert ret.returncode == 0
            assert "uninstalled pep8" in ret.stdout
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_pip_install_multiple_editables(venv_dir, salt_cli, salt_minion):
    """
    pip.install with several comma-joined ``editable`` git URLs must report
    each project as successfully installed.
    """
    editables = [
        "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
        "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
    ]
    # Create the testing virtualenv
    with VirtualEnv(venv_dir):
        ret = salt_cli.run(
            "pip.install",
            [],
            editable="{}".format(",".join(editables)),
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            if _check_download_error(ret.stdout):
                pytest.skip("Test skipped due to pip download error")
            assert ret.returncode == 0
            for package in ("iStr", "SaltTesting"):
                match = re.search(
                    rf"(?:.*)(Successfully installed)(?:.*)({package})(?:.*)",
                    ret.stdout,
                )
                assert match is not None
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.slow_test
def test_pip_install_multiple_editables_and_pkgs(venv_dir, salt_cli, salt_minion):
    """
    pip.install with both ``editable`` git URLs and a regular package list
    must report every project — editable and regular — as installed.
    """
    editables = [
        "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
        "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
    ]
    # Create the testing virtualenv
    with VirtualEnv(venv_dir):
        ret = salt_cli.run(
            "pip.install",
            ["pep8"],
            editable="{}".format(",".join(editables)),
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        try:
            if _check_download_error(ret.stdout):
                pytest.skip("Test skipped due to pip download error")
            assert ret.returncode == 0
            for package in ("iStr", "SaltTesting", "pep8"):
                match = re.search(
                    rf"(?:.*)(Successfully installed)(?:.*)({package})(?:.*)",
                    ret.stdout,
                )
                assert match is not None
        except KeyError as exc:
            pytest.fail(
                "The returned dictionary is missing an expected key. Error: '{}'."
                " Dictionary: {}".format(exc, pprint.pformat(ret))
            )
@pytest.mark.parametrize("touch", [True, False])
@pytest.mark.slow_test
def test_pip_non_existent_log_file(venv_dir, salt_cli, salt_minion, tmp_path, touch):
    """
    pip.install with ``log=`` must write pip's log to the given file whether
    the file pre-exists (``touch=True``) or not.
    """
    log_file = tmp_path / "tmp-pip-install.log"
    if touch:
        log_file.touch()
    # Create the testing virtualenv
    with VirtualEnv(venv_dir):
        ret = salt_cli.run(
            "pip.install",
            ["pep8"],
            log=str(log_file),
            bin_env=venv_dir,
            minion_tgt=salt_minion.id,
        )
        if not isinstance(ret.data, dict):
            pytest.fail(
                "The 'pip.install' command did not return the expected dictionary."
                " Output:\n{}".format(ret)
            )
        if _check_download_error(ret.stdout):
            pytest.skip("Test skipped due to pip download error")
        assert ret.returncode == 0
        assert log_file.exists()
        assert "pep8" in log_file.read_text()
@pytest.mark.skipif(
    shutil.which("/bin/pip3") is None, reason="Could not find /bin/pip3"
)
@pytest.mark.skip_on_windows(reason="test specific for linux usage of /bin/python")
@pytest.mark.skip_initial_gh_actions_failure(
    reason="This was skipped on older golden images and is failing on newer."
)
def test_system_pip3(salt_cli, salt_minion):
    """
    Round-trip against the system ``/bin/pip3``: after pip.install the
    package appears in ``pip3 freeze``; after pip.uninstall it is gone.
    """
    salt_cli.run(
        "pip.install",
        pkgs=["lazyimport==0.0.1"],
        bin_env="/bin/pip3",
        minion_tgt=salt_minion.id,
    )
    ret1 = salt_cli.run(
        "cmd.run_all", "/bin/pip3 freeze | grep lazyimport", minion_tgt=salt_minion.id
    )
    assert "lazyimport==0.0.1" in ret1.stdout
    salt_cli.run(
        "pip.uninstall",
        pkgs=["lazyimport"],
        bin_env="/bin/pip3",
        minion_tgt=salt_minion.id,
    )
    ret2 = salt_cli.run(
        "cmd.run_all", "/bin/pip3 freeze | grep lazyimport", minion_tgt=salt_minion.id
    )
    assert ret2.data["stdout"] == ""

View file

@ -18,7 +18,7 @@ EXAMPLE_PROVIDERS = {
"vmware": {
"driver": "vmware",
"password": "123456",
"url": "vca1.saltstack.com",
"url": "vca1.localhost",
"minion": {"master": "providermaster", "grains": {"providergrain": True}},
"profiles": {},
"user": "root",
@ -31,7 +31,7 @@ EXAMPLE_PROVIDERS = {
"profiles": {},
"minion": {"master": "providermaster", "grains": {"providergrain": True}},
"image": "rhel6_64prod",
"url": "vca2.saltstack.com",
"url": "vca2.localhost",
"user": "root",
}
},
@ -99,10 +99,12 @@ def salt_cloud_config_file(salt_master_factory):
return os.path.join(salt_master_factory.config_dir, "cloud")
def test_cloud_map_merge_conf(salt_cloud_config_file):
def test_cloud_map_merge_conf(salt_cloud_config_file, grains):
"""
Ensure that nested values can be selectively overridden in a map file
"""
if grains["os"] == "VMware Photon OS" and grains["osmajorrelease"] == 3:
pytest.skip("Test hangs on PhotonOS 3")
with patch(
"salt.config.check_driver_dependencies", MagicMock(return_value=True)
), patch("salt.cloud.Map.read", MagicMock(return_value=EXAMPLE_MAP)):
@ -158,7 +160,7 @@ def test_cloud_map_merge_conf(salt_cloud_config_file):
"profile": "nyc-vm",
"provider": "nyc_vcenter:vmware",
"resourcepool": "Resources",
"url": "vca1.saltstack.com",
"url": "vca1.localhost",
"user": "root",
},
"db2": {
@ -196,7 +198,7 @@ def test_cloud_map_merge_conf(salt_cloud_config_file):
"profile": "nyc-vm",
"provider": "nj_vcenter:vmware",
"resourcepool": "Resources",
"url": "vca2.saltstack.com",
"url": "vca2.localhost",
"user": "root",
},
"db3": {
@ -216,7 +218,7 @@ def test_cloud_map_merge_conf(salt_cloud_config_file):
"profile": "nj-vm",
"provider": "nj_vcenter:vmware",
"resourcepool": "Resources",
"url": "vca2.saltstack.com",
"url": "vca2.localhost",
"user": "root",
},
}

View file

@ -23,6 +23,7 @@ from salt.exceptions import (
CommandNotFoundError,
SaltInvocationError,
)
from salt.utils.odict import OrderedDict
from tests.support.mock import MagicMock, Mock, call, mock_open, patch
try:
@ -275,6 +276,41 @@ def test_add_repo_key(repo_keys_var):
)
def test_add_repo_key_none_specified(repo_keys_var):
    """
    Calling add_repo_key with no arguments at all must raise ``TypeError``
    with the expected missing-argument message.
    """
    with patch(
        "salt.modules.aptpkg.get_repo_keys", MagicMock(return_value=repo_keys_var)
    ):
        mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"})
        with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}):
            with pytest.raises(TypeError) as err:
                aptpkg.add_repo_key()
            # NOTE(review): this message style ("takes at least 1 argument")
            # presumably comes from Salt's own argument-checking wrapper, not
            # from CPython — confirm against salt.utils.args if it changes.
            assert err.value.args[0] == "add_repo_key() takes at least 1 argument (0 given)"
def test_add_repo_key_no_keyfile(repo_keys_var, caplog, tmp_path):
    """
    With ``aptkey=False`` and a keyserver but no ``keyfile``, add_repo_key
    must return ``False`` and log that a key file name is required.
    """
    with patch("salt.modules.aptpkg.get_repo_keys", MagicMock(return_value={})):
        mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"})
        with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}):
            ret = aptpkg.add_repo_key(
                keyserver="keyserver.ubuntu.com",
                keyid="FBB75451",
                keydir=tmp_path,
                aptkey=False,
            )
            assert ret is False
            assert (
                "You must define the name of the key file to save the key"
                in caplog.text
            )
def test_add_repo_key_failed(repo_keys_var):
"""
Test - Add a repo key using incomplete input data.
@ -289,6 +325,56 @@ def test_add_repo_key_failed(repo_keys_var):
aptpkg.add_repo_key(**kwargs)
def test_add_repo_key_keydir_not_exists(repo_keys_var, tmp_path, caplog):
    """
    With ``aptkey=False`` and a ``keydir`` that does not exist, add_repo_key
    must return ``False`` and log that the directory must be created.
    """
    with patch(
        "salt.modules.aptpkg.get_repo_keys", MagicMock(return_value=repo_keys_var)
    ):
        mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"})
        with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}):
            ret = aptpkg.add_repo_key(
                keyserver="keyserver.ubuntu.com",
                keyid="FBB75451",
                keyfile="test-key.gpg",
                aptkey=False,
                keydir=str(tmp_path / "doesnotexist"),
            )
            assert "does not exist. Please create this directory" in caplog.text
            assert ret is False
@pytest.mark.parametrize(
    "kwargs, err_msg",
    [
        (
            {"keyid": "FBB75451", "keyfile": "test-key.gpg"},
            "No keyserver specified for keyid",
        ),
        (
            {"keyserver": "keyserver.ubuntu.com", "keyfile": "test-key.gpg"},
            "No keyid or keyid too short for keyserver",
        ),
    ],
)
def test_add_repo_key_keyserver_keyid_not_sepcified(
    repo_keys_var, tmp_path, caplog, kwargs, err_msg
):
    """
    Supplying only one of ``keyid``/``keyserver`` must make add_repo_key
    raise ``SaltInvocationError`` naming the missing argument.

    The misspelled function name is kept so test-history/selection by name
    stays stable.
    """
    # The previous revision computed ``short_key`` from ``repo_keys_var`` but
    # never used it; the dead local has been removed.  The fixture parameters
    # are kept so the signature (and fixture wiring) is unchanged.
    with patch("salt.modules.aptpkg.get_repo_keys", MagicMock(return_value={})):
        mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"})
        with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}):
            with pytest.raises(SaltInvocationError) as err:
                aptpkg.add_repo_key(**kwargs)
            assert err_msg in err.value.message
def test_get_repo_keys(repo_keys_var):
"""
Test - List known repo key details.
@ -370,6 +456,103 @@ def test_owner():
assert aptpkg.owner(*paths) == "wget"
def test_owner_no_path():
    """
    owner() called without any path argument must return an empty string.
    """
    ret = aptpkg.owner()
    assert ret == ""
def test_owner_doesnotexist():
    """
    owner() must return an empty string for a path no package owns
    (dpkg query mocked to return nothing).
    """
    mock = MagicMock(return_value="")
    with patch.dict(aptpkg.__salt__, {"cmd.run_stdout": mock}):
        ret = aptpkg.owner("/doesnotexist")
        assert ret == ""
def test_get_http_proxy_url_username_passwd():
    """
    Test _get_http_proxy_url when username and password are set: the URL
    must embed the credentials as ``http://user:pass@host:port``.
    """
    host = "repo.saltproject.io"
    port = "888"
    user = "user"
    passwd = "password"
    # config.option is consulted in order: host, port, user, password.
    mock_conf = MagicMock()
    mock_conf.side_effect = [host, port, user, passwd]
    patch_conf = patch.dict(aptpkg.__salt__, {"config.option": mock_conf})
    with patch_conf:
        ret = aptpkg._get_http_proxy_url()
        assert ret == f"http://{user}:{passwd}@{host}:{port}"
def test_get_http_proxy_url():
    """
    Test basic functionality for _get_http_proxy_url: with no credentials
    configured the URL is plain ``http://host:port``.
    """
    host = "repo.saltproject.io"
    port = "888"
    user = ""
    passwd = ""
    mock_conf = MagicMock()
    mock_conf.side_effect = [host, port, user, passwd]
    patch_conf = patch.dict(aptpkg.__salt__, {"config.option": mock_conf})
    with patch_conf:
        ret = aptpkg._get_http_proxy_url()
        assert ret == f"http://{host}:{port}"
def test_get_http_proxy_url_empty():
    """
    Test _get_http_proxy_url when host and port are empty: the function
    must return an empty string (no proxy configured).
    """
    host = ""
    port = ""
    user = ""
    passwd = ""
    mock_conf = MagicMock()
    mock_conf.side_effect = [host, port, user, passwd]
    patch_conf = patch.dict(aptpkg.__salt__, {"config.option": mock_conf})
    with patch_conf:
        ret = aptpkg._get_http_proxy_url()
        assert ret == ""
def test_list_upgrades():
"""
Test basic functinoality for list_upgrades
"""
patch_data = patch("salt.utils.data.is_true", return_value=True)
patch_refresh = patch("salt.modules.aptpkg.refresh_db")
apt_ret = {
"pid": 2791,
"retcode": 0,
"stdout": "Reading package lists...\nBuilding dependency tree...\nReading state information...\nCalculating upgrade...\nThe following NEW packages will be installed:\n linux-cloud-tools-5.15.0-86 linux-cloud-tools-5.15.0-86-generic\n linux-headers-5.15.0-86 linux-headers-5.15.0-86-generic\n linux-image-5.15.0-86-generic linux-modules-5.15.0-86-generic\n linux-modules-extra-5.15.0-86-generic\nThe following packages have been kept back:\n libnetplan0 libsgutils2-2 netplan. io sg3-utils sg3-utils-udev\nThe following packages will be upgraded:\n linux-cloud-tools-virtual linux-generic linux-headers-generic\n linux-image-generic\n4 upgraded, 7 newly installed, 0 to remove and 5 not upgraded.\nInst linux-cloud-tools-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-cloud-tools-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-cloud-tools-virtual [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-modules-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-image-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-modules-extra-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-generic [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-image-generic [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-headers-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [all]) []\nInst linux-headers-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-headers-generic [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security 
[amd64])\nConf linux-cloud-tools-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-cloud-tools-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-cloud-tools-virtual (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-modules-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-image-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-modules-extra-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-generic (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-image-generic (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-headers-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [all])\nConf linux-headers-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-headers-generic (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])",
"stderr": "Running scope as unit: run-r014f3eae66364254b1cdacf701f1ab73.scope",
}
mock_apt = MagicMock(return_value=apt_ret)
patch_apt = patch("salt.modules.aptpkg._call_apt", mock_apt)
with patch_data, patch_refresh, patch_apt:
ret = aptpkg.list_upgrades(dist_upgrade=False)
assert ret == {
"linux-cloud-tools-5.15.0-86": "5.15.0-86.96",
"linux-cloud-tools-5.15.0-86-generic": "5.15.0-86.96",
"linux-cloud-tools-virtual": "5.15.0.86.83",
"linux-modules-5.15.0-86-generic": "5.15.0-86.96",
"linux-image-5.15.0-86-generic": "5.15.0-86.96",
"linux-modules-extra-5.15.0-86-generic": "5.15.0-86.96",
"linux-generic": "5.15.0.86.83",
"linux-image-generic": "5.15.0.86.83",
"linux-headers-5.15.0-86": "5.15.0-86.96",
"linux-headers-5.15.0-86-generic": "5.15.0-86.96",
"linux-headers-generic": "5.15.0.86.83",
}
def test_refresh_db(apt_q_update_var):
"""
Test - Updates the APT database to latest packages based upon repositories.
@ -1120,6 +1303,20 @@ def test_expand_repo_def_cdrom():
)
def test__expand_repo_def_not_repo():
    """
    _expand_repo_def must raise ``SaltInvocationError`` ("missing 'repo'
    argument") when no ``repo`` keyword is supplied.
    """
    with pytest.raises(SaltInvocationError) as err:
        aptpkg._expand_repo_def(
            os_name="debian",
            os_codename="stretch",
            architectures="amd64",
        )
    assert err.value.message == "missing 'repo' argument"
def test_list_pkgs():
"""
Test packages listing.
@ -1411,6 +1608,700 @@ def test_sourceslist_architectures(repo_line):
assert source.architectures == ["amd64"]
@pytest.mark.parametrize(
    "pkg,arch",
    [
        ("zsh", "amd64"),
        ("php", "x86_64"),
    ],
)
def test_parse_arch(pkg, arch):
    """
    parse_arch must split a ``name:arch`` string into its name and arch
    components.
    """
    ret = aptpkg.parse_arch(f"{pkg}:{arch}")
    assert ret == {"name": pkg, "arch": arch}
@pytest.mark.parametrize(
    "pkg",
    [
        "php",
    ],
)
def test_parse_arch_invalid(pkg):
    """
    parse_arch must return ``arch=None`` when the input has no ``:arch``
    suffix.
    """
    ret = aptpkg.parse_arch(f"{pkg}")
    assert ret == {"name": pkg, "arch": None}
def test_latest_version_repo_kwarg():
    """
    latest_version must reject the ``repo`` kwarg with a
    ``SaltInvocationError`` directing callers to ``fromrepo``.
    """
    with pytest.raises(SaltInvocationError) as exc:
        aptpkg.latest_version("php", repo="https://repo.com")
    assert exc.value.message == "The 'repo' argument is invalid, use 'fromrepo' instead"
def test_latest_version_names_empty():
    """
    latest_version with no package names must return an empty string.
    """
    ret = aptpkg.latest_version()
    assert ret == ""
def test_latest_version_fromrepo():
    """
    latest_version with ``fromrepo`` must return the candidate version from
    the mocked ``apt-cache policy`` output and pass
    ``-o APT::Default-Release=<fromrepo>`` to apt.
    """
    version = "5.15.0.86.83"
    fromrepo = "jammy-updates"
    list_ret = {"linux-cloud-tools-virtual": [version]}
    # NOTE(review): the internal spacing of this mocked apt-cache output may
    # have been collapsed in transit — verify against the real test file.
    apt_ret = {
        "pid": 4361,
        "retcode": 0,
        "stdout": "linux-cloud-tools-virtual:\n"
        f"Installed: 5.15.0.69.67\n Candidate: {version}\n Version"
        f"table:\n {version} 990\n 990"
        f"https://mirrors.edge.kernel.org/ubuntu {fromrepo}/main amd64"
        "Packages\n 500 https://mirrors.edge.kernel.org/ubuntu"
        "jammy-security/main amd64 Packages\n ***5.15.0.69.67 100\n"
        "100 /var/lib/dpkg/status\n 5.15.0.25.27 500\n 500"
        "https://mirrors.edge.kernel.org/ubuntu jammy/main amd64 Packages",
        "stderr": "",
    }
    mock_apt = MagicMock(return_value=apt_ret)
    patch_apt = patch("salt.modules.aptpkg._call_apt", mock_apt)
    mock_list_pkgs = MagicMock(return_value=list_ret)
    patch_list_pkgs = patch("salt.modules.aptpkg.list_pkgs", mock_list_pkgs)
    with patch_apt, patch_list_pkgs:
        ret = aptpkg.latest_version(
            "linux-cloud-tools-virtual",
            fromrepo=fromrepo,
            refresh=False,
            show_installed=True,
        )
        assert ret == version
        assert mock_apt.call_args == call(
            [
                "apt-cache",
                "-q",
                "policy",
                "linux-cloud-tools-virtual",
                "-o",
                f"APT::Default-Release={fromrepo}",
            ],
            scope=False,
        )
def test_latest_version_fromrepo_multiple_names():
    """
    latest_version with multiple package names and ``fromrepo`` must return
    a name->version dict and issue one apt-cache policy call per package.
    """
    version = "5.15.0.86.83"
    fromrepo = "jammy-updates"
    list_ret = {
        "linux-cloud-tools-virtual": ["5.15.0.69.67"],
        "linux-generic": ["5.15.0.69.67"],
    }
    # NOTE(review): the internal spacing of these mocked apt-cache outputs
    # may have been collapsed in transit — verify against the real test file.
    apt_ret_cloud = {
        "pid": 4361,
        "retcode": 0,
        "stdout": "linux-cloud-tools-virtual:\n"
        f"Installed: 5.15.0.69.67\n Candidate: {version}\n Version"
        f"table:\n {version} 990\n 990"
        f"https://mirrors.edge.kernel.org/ubuntu {fromrepo}/main amd64"
        "Packages\n 500 https://mirrors.edge.kernel.org/ubuntu"
        "jammy-security/main amd64 Packages\n ***5.15.0.69.67 100\n"
        "100 /var/lib/dpkg/status\n 5.15.0.25.27 500\n 500"
        "https://mirrors.edge.kernel.org/ubuntu jammy/main amd64 Packages",
        "stderr": "",
    }
    apt_ret_generic = {
        "pid": 4821,
        "retcode": 0,
        "stdout": "linux-generic:\n"
        f"Installed: 5.15.0.69.67\n Candidate: {version}\n"
        f"Version table:\n {version} 990\n 990"
        "https://mirrors.edge.kernel.org/ubuntu"
        "jammy-updates/main amd64 Packages\n 500"
        "https://mirrors.edge.kernel.org/ubuntu"
        "jammy-security/main amd64 Packages\n *** 5.15.0.69.67"
        "100\n 100 /var/lib/dpkg/status\n 5.15.0.25.27"
        "500\n 500 https://mirrors.edge.kernel.org/ubuntu"
        "jammy/main amd64 Packages",
        "stderr": "",
    }
    mock_apt = MagicMock()
    mock_apt.side_effect = [apt_ret_cloud, apt_ret_generic]
    patch_apt = patch("salt.modules.aptpkg._call_apt", mock_apt)
    mock_list_pkgs = MagicMock(return_value=list_ret)
    patch_list_pkgs = patch("salt.modules.aptpkg.list_pkgs", mock_list_pkgs)
    with patch_apt, patch_list_pkgs:
        ret = aptpkg.latest_version(
            "linux-cloud-tools-virtual",
            "linux-generic",
            fromrepo=fromrepo,
            refresh=False,
            show_installed=True,
        )
        assert ret == {"linux-cloud-tools-virtual": version, "linux-generic": version}
        assert mock_apt.call_args_list == [
            call(
                [
                    "apt-cache",
                    "-q",
                    "policy",
                    "linux-cloud-tools-virtual",
                    "-o",
                    "APT::Default-Release=jammy-updates",
                ],
                scope=False,
            ),
            call(
                [
                    "apt-cache",
                    "-q",
                    "policy",
                    "linux-generic",
                    "-o",
                    "APT::Default-Release=jammy-updates",
                ],
                scope=False,
            ),
        ]
def test_hold():
    """
    aptpkg.hold() with a single package name reports the package as held.
    """
    expected = {
        "vim": {
            "name": "vim",
            "changes": {"old": "install", "new": "hold"},
            "result": True,
            "comment": "Package vim is now being held.",
        }
    }
    with patch(
        "salt.modules.aptpkg.get_selections", return_value={"hold": []}
    ), patch(
        "salt.modules.aptpkg.set_selections",
        return_value={"vim": {"old": "install", "new": "hold"}},
    ):
        assert aptpkg.hold("vim") == expected
def test_hold_no_name_pkgs():
    """
    test aptpkg.hold when we do not pass in a name or list of pkgs
    """
    # With no target specified, hold() must raise before doing any work.
    with pytest.raises(SaltInvocationError) as err:
        aptpkg.hold()
    assert err.value.message == "One of name, pkgs, or sources must be specified."
def test_hold_pkgs_sources():
    """
    test aptpkg.hold when we set sources and a list of pkgs.
    """
    # pkgs and sources are mutually exclusive; passing both must raise.
    with pytest.raises(SaltInvocationError) as err:
        aptpkg.hold(
            pkgs=["vim", "apache2"], sources=["http://source1", "http://source2"]
        )
    assert err.value.message == "Only one of pkgs or sources can be specified."
@pytest.mark.parametrize(
    "sources",
    [
        # Mapping form: {pkg_name: source_url}
        [
            OrderedDict(
                [
                    (
                        "vim",
                        "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb",
                    )
                ]
            )
        ],
        # Tuple form: (pkg_name, source_url)
        [
            (
                "vim",
                "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb",
            )
        ],
    ],
)
def test_hold_sources(sources):
    """
    test aptpkg.hold when using sources
    """
    set_sel = {"vim": {"old": "install", "new": "hold"}}
    get_sel = {"hold": []}
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel)
    patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel)
    with patch_get_sel, patch_set_sel:
        ret = aptpkg.hold(sources=sources)
        assert ret == {
            "vim": {
                "name": "vim",
                "changes": {"old": "install", "new": "hold"},
                "result": True,
                "comment": "Package vim is now being held.",
            }
        }
def test_hold_true():
    """
    aptpkg.hold() in test mode (``test: True``) reports what would change
    without applying the selection.
    """
    expected = {
        "vim": {
            "name": "vim",
            "changes": {},
            "result": None,
            "comment": "Package vim is set to be held.",
        }
    }
    with patch(
        "salt.modules.aptpkg.get_selections", return_value={"hold": []}
    ), patch(
        "salt.modules.aptpkg.set_selections",
        return_value={"vim": {"old": "install", "new": "hold"}},
    ), patch.dict(aptpkg.__opts__, {"test": True}):
        assert aptpkg.hold("vim") == expected
def test_hold_already_set():
    """
    aptpkg.hold() is a no-op when the package is already in the hold set.
    """
    expected = {
        "vim": {
            "name": "vim",
            "changes": {},
            "result": True,
            "comment": "Package vim is already set to be held.",
        }
    }
    with patch("salt.modules.aptpkg.get_selections", return_value={"hold": ["vim"]}):
        assert aptpkg.hold("vim") == expected
def test_hold_pkgs():
    """
    test aptpkg.hold() when passing in pkgs
    """
    get_sel = {"hold": []}
    # One set_selections result per package, in the order hold() processes them.
    mock_set_sel = MagicMock()
    mock_set_sel.side_effect = [
        {"vim": {"old": "install", "new": "hold"}},
        {"vim-nox": {"old": "install", "new": "hold"}},
    ]
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel)
    patch_set_sel = patch("salt.modules.aptpkg.set_selections", mock_set_sel)
    with patch_get_sel, patch_set_sel:
        ret = aptpkg.hold(pkgs=["vim", "vim-nox"])
        assert ret == {
            "vim": {
                "name": "vim",
                "changes": {"old": "install", "new": "hold"},
                "result": True,
                "comment": "Package vim is now being held.",
            },
            "vim-nox": {
                "name": "vim-nox",
                "changes": {"old": "install", "new": "hold"},
                "result": True,
                "comment": "Package vim-nox is now being held.",
            },
        }
def test_unhold():
    """
    aptpkg.unhold() with a single package name reports the package
    as no longer being held.
    """
    expected = {
        "vim": {
            "name": "vim",
            "changes": {"old": "hold", "new": "install"},
            "result": True,
            "comment": "Package vim is no longer being held.",
        }
    }
    with patch(
        "salt.modules.aptpkg.get_selections", return_value={"hold": ["vim"]}
    ), patch(
        "salt.modules.aptpkg.set_selections",
        return_value={"vim": {"old": "hold", "new": "install"}},
    ):
        assert aptpkg.unhold("vim") == expected
def test_unhold_no_name_pkgs():
    """
    test aptpkg.unhold when we do not pass in a name or list of pkgs
    """
    # With no target specified, unhold() must raise before doing any work.
    with pytest.raises(SaltInvocationError) as err:
        aptpkg.unhold()
    assert err.value.message == "One of name, pkgs, or sources must be specified."
def test_unhold_pkgs_sources():
    """
    test aptpkg.unhold when we set sources and a list of pkgs.
    """
    # pkgs and sources are mutually exclusive; passing both must raise.
    with pytest.raises(SaltInvocationError) as err:
        aptpkg.unhold(
            pkgs=["vim", "apache2"], sources=["http://source1", "http://source2"]
        )
    assert err.value.message == "Only one of pkgs or sources can be specified."
@pytest.mark.parametrize(
    "sources",
    [
        # Mapping form: {pkg_name: source_url}
        [
            OrderedDict(
                [
                    (
                        "vim",
                        "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb",
                    )
                ]
            )
        ],
        # Tuple form: (pkg_name, source_url)
        [
            (
                "vim",
                "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb",
            )
        ],
    ],
)
def test_unhold_sources(sources):
    """
    test aptpkg.unhold when using sources
    """
    set_sel = {"vim": {"old": "hold", "new": "install"}}
    get_sel = {"hold": ["vim"]}
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel)
    patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel)
    with patch_get_sel, patch_set_sel:
        ret = aptpkg.unhold(sources=sources)
        assert ret == {
            "vim": {
                "name": "vim",
                "changes": {"old": "hold", "new": "install"},
                "result": True,
                "comment": "Package vim is no longer being held.",
            }
        }
def test_unhold_true():
    """
    test aptpkg.unhold() when passing in the name of a package
    and test is True
    """
    # set_selections is patched, so this return value is never consumed in
    # test mode; the function must short-circuit with result=None.
    set_sel = {"vim": {"old": "install", "new": "hold"}}
    get_sel = {"hold": ["vim"]}
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel)
    patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel)
    with patch_get_sel, patch_set_sel:
        with patch.dict(aptpkg.__opts__, {"test": True}):
            ret = aptpkg.unhold("vim")
            assert ret == {
                "vim": {
                    "name": "vim",
                    "changes": {},
                    "result": None,
                    "comment": "Package vim is set not to be held.",
                }
            }
def test_unhold_already_set():
    """
    aptpkg.unhold() is a no-op when the package is not currently held.
    """
    expected = {
        "vim": {
            "name": "vim",
            "changes": {},
            "result": True,
            "comment": "Package vim is already set not to be held.",
        }
    }
    with patch(
        "salt.modules.aptpkg.get_selections", return_value={"install": ["vim"]}
    ):
        assert aptpkg.unhold("vim") == expected
def test_unhold_pkgs():
    """
    test aptpkg.unhold() when passing in pkgs
    """
    # One get/set_selections result per package, in processing order.
    mock_get_sel = MagicMock()
    mock_get_sel.side_effect = [{"hold": ["vim"]}, {"hold": ["vim-nox"]}]
    mock_set_sel = MagicMock()
    mock_set_sel.side_effect = [
        {"vim": {"old": "hold", "new": "install"}},
        {"vim-nox": {"old": "hold", "new": "install"}},
    ]
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    patch_set_sel = patch("salt.modules.aptpkg.set_selections", mock_set_sel)
    with patch_get_sel, patch_set_sel:
        ret = aptpkg.unhold(pkgs=["vim", "vim-nox"])
        assert ret == {
            "vim": {
                "name": "vim",
                "changes": {"old": "hold", "new": "install"},
                "result": True,
                "comment": "Package vim is no longer being held.",
            },
            "vim-nox": {
                "name": "vim-nox",
                "changes": {"old": "hold", "new": "install"},
                "result": True,
                "comment": "Package vim-nox is no longer being held.",
            },
        }
def test_get_key_from_id_keylength_not_valid(tmp_path, caplog):
    """
    test _get_key_from_id when the keyid length is not valid
    """
    # 9 characters: neither the short (8) nor the long (16) keyid form.
    ret = aptpkg._get_key_from_id(tmp_path, "FBB754512")
    assert ret is False
    assert "The keyid needs to be either 8 or 16 characters" in caplog.text
def test_get_key_from_id_not_added(tmp_path, caplog):
    """
    test _get_key_from_id when the keyfile is not added
    """
    # tmp_path is empty, so no key file exists for this (valid 8-char) keyid.
    ret = aptpkg._get_key_from_id(tmp_path, "FBB75451")
    assert ret is False
    assert "Could not find the key file for keyid" in caplog.text
def test_del_repo_key_keydir_doesnotexist(tmp_path, caplog):
    """
    test del_repo_key when keydir does not exist and aptkey is False
    """
    # The "keydir" subdirectory is never created, so the call must fail.
    ret = aptpkg.del_repo_key(
        keyid="0E08A149DE57BFBE", keydir=str(tmp_path / "keydir"), aptkey=False
    )
    assert ret is False
    assert "does not exist. Please create this directory" in caplog.text
def test_del_repo_key_keyid_doesnotexist(tmp_path, caplog):
    """
    test del_repo_key when keyid is not passed in

    del_repo_key() must raise SaltInvocationError when neither ``keyid``
    nor ``keyid_ppa`` is supplied and apt-key is unavailable.
    """
    with patch("salt.utils.path.which", return_value=False):
        with pytest.raises(SaltInvocationError) as err:
            # No assignment: the call must raise, so a return value would
            # never be reached (the previous ``ret =`` was dead code).
            aptpkg.del_repo_key(keydir=tmp_path, aptkey=False)
    assert err.value.message == "keyid or keyid_ppa and PPA name must be passed"
def test_del_repo_key_keyfile_doesnotexist(tmp_path, caplog):
    """
    test del_repo_key when keyfile does not exist
    """
    # apt-key is unavailable and tmp_path holds no key file for this keyid.
    with patch("salt.utils.path.which", return_value=False):
        ret = aptpkg.del_repo_key(
            keyid="0E08A149DE57BFBE", keydir=tmp_path, aptkey=False
        )
        assert ret is False
def test_set_selections():
    """
    test set_selections() with valid state
    """
    pkg = "salt-minion"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    patch_opts = patch.dict(aptpkg.__opts__, {"test": False})
    with patch_get_sel, patch_call_apt, patch_opts:
        # Selection is a YAML mapping of state -> package list.
        ret = aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}')
        assert ret == {pkg: {"old": "install", "new": "hold"}}
def test_set_selections_no_path_selection():
    """
    test set_selections() when path or selection are not passed
    """
    pkg = "salt-minion"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    patch_opts = patch.dict(aptpkg.__opts__, {"test": False})
    with patch_get_sel, patch_call_apt, patch_opts:
        # Nothing to apply: no selection and no path yields an empty result.
        ret = aptpkg.set_selections()
        assert ret == {}
def test_set_selections_path_and_selection(tmp_path):
    """
    test set_selections() when path and selection are passed

    The two arguments are mutually exclusive, so the call must raise
    SaltInvocationError.
    """
    pkg = "salt-minion"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    patch_opts = patch.dict(aptpkg.__opts__, {"test": False})
    with patch_get_sel, patch_call_apt, patch_opts:
        with pytest.raises(SaltInvocationError) as err:
            # No assignment: the call must raise, so a return value would
            # never be reached (the previous ``ret =`` was dead code).
            aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}', path=tmp_path)
    assert "The 'selection' and 'path' arguments" in err.value.message
def test_set_selections_invalid_yaml():
    """
    test set_selections() with invalid yaml with selections
    """
    pkg = "salt-minion"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    patch_opts = patch.dict(aptpkg.__opts__, {"test": False})
    with patch_get_sel, patch_call_apt, patch_opts:
        with pytest.raises(SaltInvocationError) as err:
            # Deliberately NOT an f-string: unbalanced literal braces make
            # this malformed YAML.
            aptpkg.set_selections(selection='{{"hold": [{pkg}]}')
    assert "Improperly-formatted selection" in err.value.message
def test_set_selections_path(tmp_path):
    """
    test set_selections() with path
    """
    pkg = "salt-minion"
    select_file = tmp_path / "select"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    patch_opts = patch.dict(aptpkg.__opts__, {"test": False})
    # cp.cache_file resolves the remote path to our local selections file.
    patch_salt = patch.dict(
        aptpkg.__salt__, {"cp.cache_file": MagicMock(return_value=select_file)}
    )
    # dpkg --set-selections format: one "<package> <state>" entry per line.
    with salt.utils.files.fopen(select_file, "w") as fp:
        fp.write("salt-minion hold\n adduser hold")
    with patch_get_sel, patch_call_apt, patch_opts, patch_salt:
        ret = aptpkg.set_selections(path=str(select_file))
        assert ret == {
            pkg: {"old": "install", "new": "hold"},
            "adduser": {"old": "install", "new": "hold"},
        }
def test_set_selections_invalid_state():
    """
    test set_selections() with invalid state
    """
    pkg = "salt-minion"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    patch_opts = patch.dict(aptpkg.__opts__, {"test": False})
    with patch_get_sel, patch_call_apt, patch_opts:
        with pytest.raises(SaltInvocationError) as err:
            # "doesnotexist" is not a valid dpkg selection state.
            aptpkg.set_selections(selection=f'{{"doesnotexist": [{pkg}]}}')
    assert err.value.message == "Invalid state(s): doesnotexist"
def test_set_selections_test():
    """
    test set_selections() with valid state and test is True in opts
    """
    pkg = "salt-minion"
    mock_get_sel = MagicMock(
        return_value={
            "install": ["adduser", pkg, "apparmor"],
            "deinstall": ["python3-json-pointer"],
        }
    )
    patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel)
    mock_call_apt = MagicMock(
        return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""}
    )
    patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt)
    # In test mode no selections are applied, so the result is empty.
    patch_opts = patch.dict(aptpkg.__opts__, {"test": True})
    with patch_get_sel, patch_call_apt, patch_opts:
        ret = aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}')
        assert ret == {}
def test_latest_version_calls_aptcache_once_per_run():
"""
Performance Test - don't call apt-cache once for each pkg, call once and parse output

View file

@ -0,0 +1,201 @@
"""
Test functions in state.py that are not a part of a class
"""
import pytest
import salt.state
from salt.utils.odict import OrderedDict
pytestmark = [
pytest.mark.core_test,
]
def test_state_args():
    """
    Testing state.state_args when this state is being used:

    /etc/foo.conf:
      file.managed:
        - contents: "blah"
        - mkdirs: True
        - user: ch3ll
        - group: ch3ll
        - mode: 755

    /etc/bar.conf:
      file.managed:
        - use:
          - file: /etc/foo.conf
    """
    id_ = "/etc/bar.conf"
    state = "file"
    # High data equivalent to the SLS in the docstring above.
    high = OrderedDict(
        [
            (
                "/etc/foo.conf",
                OrderedDict(
                    [
                        (
                            "file",
                            [
                                OrderedDict([("contents", "blah")]),
                                OrderedDict([("mkdirs", True)]),
                                OrderedDict([("user", "ch3ll")]),
                                OrderedDict([("group", "ch3ll")]),
                                OrderedDict([("mode", 755)]),
                                "managed",
                                {"order": 10000},
                            ],
                        ),
                        ("__sls__", "test"),
                        ("__env__", "base"),
                    ]
                ),
            ),
            (
                "/etc/bar.conf",
                OrderedDict(
                    [
                        (
                            "file",
                            [
                                OrderedDict(
                                    [
                                        (
                                            "use",
                                            [OrderedDict([("file", "/etc/foo.conf")])],
                                        )
                                    ]
                                ),
                                "managed",
                                {"order": 10001},
                            ],
                        ),
                        ("__sls__", "test"),
                        ("__env__", "base"),
                    ]
                ),
            ),
        ]
    )
    ret = salt.state.state_args(id_, state, high)
    # Only the arg names present on /etc/bar.conf's file state are returned.
    assert ret == {"order", "use"}
def test_state_args_id_not_high():
    """
    Testing state.state_args when id_ is not in high
    """
    # "/etc/bar.conf2" does not exist in the high data below.
    id_ = "/etc/bar.conf2"
    state = "file"
    high = OrderedDict(
        [
            (
                "/etc/foo.conf",
                OrderedDict(
                    [
                        (
                            "file",
                            [
                                OrderedDict([("contents", "blah")]),
                                OrderedDict([("mkdirs", True)]),
                                OrderedDict([("user", "ch3ll")]),
                                OrderedDict([("group", "ch3ll")]),
                                OrderedDict([("mode", 755)]),
                                "managed",
                                {"order": 10000},
                            ],
                        ),
                        ("__sls__", "test"),
                        ("__env__", "base"),
                    ]
                ),
            ),
            (
                "/etc/bar.conf",
                OrderedDict(
                    [
                        (
                            "file",
                            [
                                OrderedDict(
                                    [
                                        (
                                            "use",
                                            [OrderedDict([("file", "/etc/foo.conf")])],
                                        )
                                    ]
                                ),
                                "managed",
                                {"order": 10001},
                            ],
                        ),
                        ("__sls__", "test"),
                        ("__env__", "base"),
                    ]
                ),
            ),
        ]
    )
    ret = salt.state.state_args(id_, state, high)
    # An unknown id yields no argument names.
    assert ret == set()
def test_state_args_state_not_high():
    """
    Testing state.state_args when state is not in high data
    """
    id_ = "/etc/bar.conf"
    # "file2" is not a state present under id_ in the high data below.
    state = "file2"
    high = OrderedDict(
        [
            (
                "/etc/foo.conf",
                OrderedDict(
                    [
                        (
                            "file",
                            [
                                OrderedDict([("contents", "blah")]),
                                OrderedDict([("mkdirs", True)]),
                                OrderedDict([("user", "ch3ll")]),
                                OrderedDict([("group", "ch3ll")]),
                                OrderedDict([("mode", 755)]),
                                "managed",
                                {"order": 10000},
                            ],
                        ),
                        ("__sls__", "test"),
                        ("__env__", "base"),
                    ]
                ),
            ),
            (
                "/etc/bar.conf",
                OrderedDict(
                    [
                        (
                            "file",
                            [
                                OrderedDict(
                                    [
                                        (
                                            "use",
                                            [OrderedDict([("file", "/etc/foo.conf")])],
                                        )
                                    ]
                                ),
                                "managed",
                                {"order": 10001},
                            ],
                        ),
                        ("__sls__", "test"),
                        ("__env__", "base"),
                    ]
                ),
            ),
        ]
    )
    ret = salt.state.state_args(id_, state, high)
    # An unknown state name yields no argument names.
    assert ret == set()

View file

@ -1302,3 +1302,37 @@ def test_check_refresh_pillar(minion_opts, caplog):
state_obj.check_refresh(data, ret)
mock_refresh.assert_called_once()
assert "Refreshing pillar..." in caplog.text
def test_module_refresh_runtimeerror(minion_opts, caplog):
    """
    test module_refresh when runtimeerror occurs
    """
    # importlib.reload raising RuntimeError must be caught and logged,
    # not propagated.
    mock_importlib = MagicMock()
    mock_importlib.side_effect = RuntimeError("Error")
    patch_importlib = patch("importlib.reload", mock_importlib)
    patch_pillar = patch("salt.state.State._gather_pillar", return_value="")
    with patch_importlib, patch_pillar:
        state_obj = salt.state.State(minion_opts)
        state_obj.module_refresh()
        assert (
            "Error encountered during module reload. Modules were not reloaded."
            in caplog.text
        )
def test_module_refresh_typeerror(minion_opts, caplog):
    """
    module_refresh() must swallow a TypeError raised by importlib.reload
    and log that modules were not reloaded.
    """
    reload_mock = MagicMock(side_effect=TypeError("Error"))
    with patch("importlib.reload", reload_mock), patch(
        "salt.state.State._gather_pillar", return_value=""
    ):
        salt.state.State(minion_opts).module_refresh()
        assert (
            "Error encountered during module reload. Modules were not reloaded."
            in caplog.text
        )

View file

@ -8,7 +8,7 @@ import textwrap
import pytest # pylint: disable=unused-import
import salt.state
from salt.utils.odict import OrderedDict
from salt.utils.odict import DefaultOrderedDict, OrderedDict
log = logging.getLogger(__name__)
@ -352,3 +352,68 @@ def test_dont_extend_in_excluded_sls_file(highstate, state_tree_dir):
)
]
)
def test_verify_tops(highstate):
    """
    verify_tops() returns no errors for a well-formed top structure.
    """
    top_data = DefaultOrderedDict(OrderedDict)
    top_data["base"] = OrderedDict([("*", ["test", "test2"])])
    # An empty error list means the tops verified cleanly.
    assert highstate.verify_tops(top_data) == []
def test_verify_tops_not_dict(highstate):
    """
    verify_tops() flags top data that is not a dict.
    """
    errors = highstate.verify_tops(["base", "test", "test2"])
    assert errors == ["Top data was not formed as a dict"]
def test_verify_tops_env_empty(highstate):
    """
    test verify_tops when the environment is empty
    """
    tops = DefaultOrderedDict(OrderedDict)
    # An empty-string saltenv key is invalid and must be reported.
    tops[""] = OrderedDict([("*", ["test", "test2"])])
    matches = highstate.verify_tops(tops)
    assert matches == ["Empty saltenv statement in top file"]
def test_verify_tops_sls_not_list(highstate):
    """
    test verify_tops when the sls files are not a list
    """
    tops = DefaultOrderedDict(OrderedDict)
    # The sls files are given as a single string instead of a list.
    tops["base"] = OrderedDict([("*", "test test2")])
    matches = highstate.verify_tops(tops)
    # A single malformed-topfile error is expected for the string value.
    assert matches == ["Malformed topfile (state declarations not formed as a list)"]
def test_verify_tops_match(highstate):
    """
    test basic functionality of verify_tops when using a matcher
    like `match: glob`.
    """
    tops = DefaultOrderedDict(OrderedDict)
    tops["base"] = OrderedDict(
        [("*", [OrderedDict([("match", "glob")]), "test", "test2"])]
    )
    matches = highstate.verify_tops(tops)
    # [] means there were no errors when verifying tops
    assert matches == []
def test_verify_tops_match_none(highstate):
    """
    test basic functionality of verify_tops when using a matcher
    when it is empty, like `match: ""`.
    """
    tops = DefaultOrderedDict(OrderedDict)
    tops["base"] = OrderedDict([("*", [OrderedDict([("match", "")]), "test", "test2"])])
    matches = highstate.verify_tops(tops)
    # The empty matcher must be reported as improperly formatted.
    assert "Improperly formatted top file matcher in saltenv" in matches[0]

View file

@ -0,0 +1,525 @@
import pytest
import salt.states.saltmod as saltmod
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules(minion_opts):
    """
    Set up the saltmod state module with minion opts for the loader.
    """
    return {
        saltmod: {
            "__opts__": minion_opts,
        },
    }
def test_function():
    """
    Test to execute a single module function on a remote
    minion via salt or salt-ssh
    """
    name = "state"
    tgt = "larry"
    # Test mode: nothing runs, result is None with a "would be" comment.
    expected = {
        "name": name,
        "changes": {},
        "result": None,
        "comment": f"Function state would be executed on target {tgt}",
    }
    with patch.dict(saltmod.__opts__, {"test": True}):
        ret = saltmod.function(name, tgt)
        assert ret == expected

    # Real run: a successful saltutil.cmd response flips result to True.
    expected.update(
        {
            "result": True,
            "changes": {"ret": {tgt: ""}},
            "comment": (f"Function ran successfully. Function state ran on {tgt}."),
        }
    )
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock_ret = {"larry": {"ret": "", "retcode": 0, "failed": False}}
        mock_cmd = MagicMock(return_value=mock_ret)
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock_cmd}):
            ret = saltmod.function(name, tgt)
            assert ret == expected
def test_function_when_no_minions_match():
    """
    saltmod.function() returns a failed result when saltutil.cmd gets
    no responses back from any minion.
    """
    name = "state"
    with patch.dict(saltmod.__opts__, {"test": False}), patch.dict(
        saltmod.__salt__, {"saltutil.cmd": MagicMock(return_value={})}
    ):
        result = saltmod.function(name, "larry")
        assert result == {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "No minions responded",
        }
def test_function_ssh():
    """
    Test saltmod function passes roster to saltutil.cmd
    """
    cmd_mock = MagicMock()
    with patch.dict(saltmod.__opts__, {"test": False}), patch.dict(
        saltmod.__salt__, {"saltutil.cmd": cmd_mock}
    ):
        saltmod.function("state", tgt="*", ssh=True, roster="my_roster")
    # The roster name must be forwarded verbatim to saltutil.cmd.
    assert "roster" in cmd_mock.call_args.kwargs
    assert cmd_mock.call_args.kwargs["roster"] == "my_roster"
def test_arg():
    """
    saltmod.function() accepts ``arg`` as either a space-separated string
    (with a deprecation-style warning) or a list; both are forwarded to
    saltutil.cmd as a list.
    """
    name = "state"
    tgt = "larry"
    expected = {
        "name": name,
        "changes": {"ret": {tgt: ""}},
        "result": True,
        "comment": f"Function ran successfully. Function state ran on {tgt}.",
        "warnings": ["Please specify 'arg' as a list of arguments."],
    }
    # String form: split into a list and a warning is attached.
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        args = ["foo", "bar"]
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, arg=" ".join(args))
            assert ret == expected
            mock.assert_called_once()
            assert "arg" in mock.call_args.kwargs
            assert mock.call_args.kwargs["arg"] == args

    # List form: passed through unchanged, no warning.
    expected.pop("warnings")
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        args = ["foo", "bar"]
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, arg=args)
            assert ret == expected
            mock.assert_called_once()
            assert "arg" in mock.call_args.kwargs
            assert mock.call_args.kwargs["arg"] == args
def test_batch():
    """
    saltmod.function() forwards ``batch`` to saltutil.cmd: strings pass
    through unchanged, other values are stringified.
    """
    name = "state"
    tgt = "larry"
    expected = {
        "name": name,
        "changes": {"ret": {tgt: ""}},
        "result": True,
        "comment": f"Function ran successfully. Function state ran on {tgt}.",
    }
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        # String batch is forwarded as-is.
        batch = "yes"
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, batch=batch)
            assert ret == expected
            mock.assert_called_once()
            assert "batch" in mock.call_args.kwargs
            assert mock.call_args.kwargs["batch"] == batch
        # Non-string batch is converted with str() before forwarding.
        batch = ["yes", "no"]
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, batch=batch)
            assert ret == expected
            assert "batch" in mock.call_args.kwargs
            assert mock.call_args.kwargs["batch"] == str(batch)
def test_subset():
    """
    saltmod.function() forwards the ``subset`` argument to saltutil.cmd.
    """
    name = "state"
    tgt = "larry"
    cmd_mock = MagicMock(
        return_value={tgt: {"ret": "", "retcode": 0, "failed": False}}
    )
    with patch.dict(saltmod.__opts__, {"test": False}), patch.dict(
        saltmod.__salt__, {"saltutil.cmd": cmd_mock}
    ):
        result = saltmod.function(name, tgt, subset="yes")
        assert result == {
            "name": name,
            "changes": {"ret": {tgt: ""}},
            "result": True,
            "comment": f"Function ran successfully. Function state ran on {tgt}.",
        }
        cmd_mock.assert_called_once()
        assert "subset" in cmd_mock.call_args.kwargs
        assert cmd_mock.call_args.kwargs["subset"] == "yes"
def test_ret_config():
    """
    saltmod.function() forwards ``ret_config`` untouched to saltutil.cmd.
    """
    name = "state"
    tgt = "larry"
    expected = {
        "name": name,
        "changes": {"ret": {tgt: ""}},
        "result": True,
        "comment": f"Function ran successfully. Function state ran on {tgt}.",
    }
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        ret_config = {"yes": "no"}
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, ret_config=ret_config)
            assert ret == expected
            mock.assert_called_once()
            assert "ret_config" in mock.call_args.kwargs
            assert mock.call_args.kwargs["ret_config"] == ret_config
def test_ret_kwargs():
    """
    saltmod.function() forwards ``ret_kwargs`` untouched to saltutil.cmd.
    """
    name = "state"
    tgt = "larry"
    expected = {
        "name": name,
        "changes": {"ret": {tgt: ""}},
        "result": True,
        "comment": f"Function ran successfully. Function state ran on {tgt}.",
    }
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        ret_kwargs = {"yes": "no"}
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, ret_kwargs=ret_kwargs)
            assert ret == expected
            mock.assert_called_once()
            assert "ret_kwargs" in mock.call_args.kwargs
            assert mock.call_args.kwargs["ret_kwargs"] == ret_kwargs
def test_failhard():
    """
    saltmod.function() forwards ``failhard`` to saltutil.cmd, whether it is
    passed explicitly or inherited from __opts__.
    """
    name = "state"
    tgt = "larry"
    expected = {
        "name": name,
        "changes": {"ret": {tgt: ""}},
        "result": True,
        "comment": f"Function ran successfully. Function state ran on {tgt}.",
    }
    # failhard passed explicitly as a keyword argument.
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, failhard=True)
            assert ret == expected
            mock.assert_called_once()
            assert "failhard" in mock.call_args.kwargs
            assert mock.call_args.kwargs["failhard"] is True
    # failhard inherited from minion opts.
    with patch.dict(saltmod.__opts__, {"test": False, "failhard": True}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt)
            assert ret == expected
            mock.assert_called_once()
            assert "failhard" in mock.call_args.kwargs
            assert mock.call_args.kwargs["failhard"] is True
def test_fail_minions():
    """
    saltmod.function() with ``fail_minions``: failures on listed minions
    are tolerated (their changes entry becomes False), failures on other
    minions fail the whole state; an invalid fail_minions type is ignored
    with a warning.
    """
    name = "state"
    tgt = "larry"
    expected = {
        "name": name,
        "changes": {
            "ret": {
                tgt: "",
                "red": "red",
                "green": "green",
                "blue": "blue",
            },
        },
        "result": True,
    }
    # All minions succeed: fail_minions forms (string, csv, list) all pass.
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
                "red": {
                    "ret": "red",
                    "retcode": 0,
                    "failed": False,
                },
                "green": {
                    "ret": "green",
                    "retcode": 0,
                    "failed": False,
                },
                "blue": {
                    "ret": "blue",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions="red")
            ret_comment = ret.pop("comment")
            assert ret == expected
            assert "Function ran successfully. Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
            mock.assert_called_once()
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions="red,green")
            ret_comment = ret.pop("comment")
            assert ret == expected
            assert "Function ran successfully. Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions=["red", "green"])
            ret_comment = ret.pop("comment")
            assert ret == expected
            assert "Function ran successfully. Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
        # A tuple is not an accepted type; it is ignored with a warning.
        expected["warnings"] = [
            "'fail_minions' needs to be a list or a comma separated string. Ignored."
        ]
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions=())
            ret_comment = ret.pop("comment")
            assert ret == expected
            assert "Function ran successfully. Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
    # "red" fails but is in fail_minions: state still succeeds overall.
    expected.pop("warnings")
    expected["changes"]["ret"]["red"] = False
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
                "red": {
                    "ret": "red",
                    "retcode": 0,
                    "failed": True,
                },
                "green": {
                    "ret": "green",
                    "retcode": 0,
                    "failed": False,
                },
                "blue": {
                    "ret": "blue",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions="red")
            ret_comment = ret.pop("comment")
            assert ret == expected
            assert "Function ran successfully. Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
            mock.assert_called_once()
    # "green" also fails but is NOT in fail_minions: the state fails.
    expected["result"] = False
    expected["changes"]["ret"]["green"] = False
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
                "red": {
                    "ret": "red",
                    "retcode": 0,
                    "failed": True,
                },
                "green": {
                    "ret": "green",
                    "retcode": 0,
                    "failed": True,
                },
                "blue": {
                    "ret": "blue",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions="red")
            ret_comment = ret.pop("comment")
            assert ret == expected
            assert "Running function state failed on minions: green " in ret_comment
            assert "Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
            mock.assert_called_once()
    # Both "red" and "green" fail with non-zero retcodes; both appear in
    # the failure comment (order of the two names is not guaranteed).
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                tgt: {
                    "ret": "",
                    "retcode": 0,
                    "failed": False,
                },
                "red": {
                    "ret": "red",
                    "retcode": 1,
                    "failed": True,
                },
                "green": {
                    "ret": "green",
                    "retcode": 1,
                    "failed": True,
                },
                "blue": {
                    "ret": "blue",
                    "retcode": 0,
                    "failed": False,
                },
            },
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.function(name, tgt, fail_minions="red")
            ret_comment = ret.pop("comment")
            assert ret == expected
            try:
                assert (
                    "Running function state failed on minions: green, red"
                    in ret_comment
                )
            except AssertionError:
                assert (
                    "Running function state failed on minions: red, green"
                    in ret_comment
                )
            assert "Function state ran on " in ret_comment
            for part in (tgt, "red", "green", "blue"):
                assert part in ret_comment
            mock.assert_called_once()
def test_exception_raised():
    """
    saltmod.function() converts an exception raised by saltutil.cmd into
    a failed state result whose comment is the exception text.
    """
    cmd_mock = MagicMock(side_effect=Exception("I'm an exception!"))
    with patch.dict(saltmod.__opts__, {"test": False}), patch.dict(
        saltmod.__salt__, {"saltutil.cmd": cmd_mock}
    ):
        result = saltmod.function("state", "larry", failhard=True)
        assert result == {
            "name": "state",
            "changes": {},
            "result": False,
            "comment": "I'm an exception!",
        }
    cmd_mock.assert_called_once()
    assert "failhard" in cmd_mock.call_args.kwargs
    assert cmd_mock.call_args.kwargs["failhard"] is True

View file

@ -0,0 +1,253 @@
import pytest
import salt.exceptions
import salt.states.saltmod as saltmod
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules(minion_opts):
    """
    Set up the saltmod state module with minion opts and a base saltenv.
    """
    return {
        saltmod: {
            "__env__": "base",
            "__opts__": minion_opts,
        },
    }
def test_runners():
    """parallel_runners accepts a single runner name or a dict of runners.

    Also verifies that a non-string/non-dict ``runners`` argument is
    rejected with a failed result.
    """
    name = "runner-name"
    runner_1 = "runner-1"
    runner_1_ret = {
        "jid": "20170406104341210934",
        "retcode": 0,
        "ret": {
            "test_|-notify_me_|-this is a name 1_|-show_notification": {
                "comment": f"Notify me: {runner_1}",
                "name": "this is a name 1",
                "start_time": "10:43:41.487565",
                "result": True,
                "duration": 0.35,
                "__run_num__": 0,
                "__sls__": "demo",
                "changes": {},
                "__id__": "notify_me",
            }
        },
        "failed": True,
        "out": "highstate",
    }
    expected = {
        "name": name,
        "changes": {
            "ret": {
                runner_1: runner_1_ret,
            }
        },
        "result": True,
        "comment": "All runner functions executed successfully.",
    }
    # Single runner passed as a plain string.
    mock = MagicMock(side_effect=[{"return": runner_1_ret}])
    with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}):
        ret = saltmod.parallel_runners(name, runner_1)
        assert ret == expected
    runner_2 = "runner-2"
    runner_2_ret = {
        "jid": "20170406104341210934",
        "retcode": 0,
        "ret": {
            "test_|-notify_me_|-this is a name 2_|-show_notification": {
                "comment": f"Notify me: {runner_2}",
                "name": "this is a name 2",
                "start_time": "10:43:41.487565",
                "result": True,
                "duration": 0.35,
                "__run_num__": 0,
                "__sls__": "demo",
                "changes": {},
                "__id__": "notify_me",
            }
        },
        "failed": True,
        "out": "highstate",
    }
    # Two runners passed as a dict; both returns show up in changes.
    expected["changes"]["ret"][runner_2] = runner_2_ret
    mock = MagicMock(side_effect=[{"return": runner_1_ret}, {"return": runner_2_ret}])
    with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}):
        ret = saltmod.parallel_runners(
            name, {runner_1: {"name": name}, runner_2: {"name": name}}
        )
        assert ret == expected
    # A list is not a valid runners argument.
    expected = {
        "name": name,
        "result": False,
        "changes": {},
        "comment": "The runners parameter must be a string or dict.",
    }
    ret = saltmod.parallel_runners(name, [runner_1, runner_2])
    assert ret == expected
def test_exception():
    """A SaltException from one runner fails the whole parallel_runners call."""
    name = "runner-name"
    runner_1 = "runner-1"
    runner_1_ret = {
        "jid": "20170406104341210934",
        "retcode": 0,
        "ret": {
            "test_|-notify_me_|-this is a name_|-show_notification": {
                "comment": "Notify me",
                "name": "this is a name",
                "start_time": "10:43:41.487565",
                "result": True,
                "duration": 0.35,
                "__run_num__": 0,
                "__sls__": "demo",
                "changes": {},
                "__id__": "notify_me",
            }
        },
        "failed": True,
        "out": "highstate",
    }
    expected = {
        "name": name,
        "result": False,
        "changes": {},
        "comment": "One of the runners raised an exception: An Exception!",
        "success": False,
    }
    runner_2 = "runner-2"
    # First runner succeeds, second raises.
    mock = MagicMock(
        side_effect=[
            {"return": runner_1_ret},
            salt.exceptions.SaltException("An Exception!"),
        ]
    )
    with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}):
        ret = saltmod.parallel_runners(
            name, {runner_1: {"name": name}, runner_2: {"name": name}}
        )
        assert ret == expected
def test_failed():
    """Runners with a non-zero ``exit_code`` are reported in the comment.

    Covers a single failed runner, a mix of failed and passing runners,
    and the multi-failure comment listing only the failed runners.
    """
    name = "runner-name"
    runner_1 = "runner-1"
    # exit_code 1 -> this runner counts as failed.
    runner_1_ret = {
        "jid": "20170406104341210934",
        "retcode": 0,
        "ret": {
            "test_|-notify_me_|-this is a name 1_|-show_notification": {
                "comment": f"Notify me: {runner_1}",
                "name": "this is a name 1",
                "start_time": "10:43:41.487565",
                "result": True,
                "duration": 0.35,
                "__run_num__": 0,
                "__sls__": "demo",
                "changes": {"foo": "bar"},
                "__id__": "notify_me",
            }
        },
        "failed": True,
        "out": "highstate",
        "exit_code": 1,
    }
    runner_2 = "runner-2"
    # exit_code 0 -> this runner counts as passed.
    runner_2_ret = {
        "jid": "20170406104341210934",
        "retcode": 1,
        "ret": {
            "test_|-notify_me_|-this is a name 2_|-show_notification": {
                "comment": f"Notify me: {runner_2}",
                "name": "this is a name 2",
                "start_time": "10:43:41.487565",
                "result": False,
                "duration": 0.35,
                "__run_num__": 0,
                "__sls__": "demo",
                "changes": {},
                "__id__": "notify_me",
            }
        },
        "failed": True,
        "out": "highstate",
        "exit_code": 0,
    }
    expected = {
        "name": name,
        "changes": {
            "ret": {
                runner_1: runner_1_ret,
            }
        },
        "result": False,
        "comment": f"Runner {runner_1} failed.",
    }
    # Single failed runner.
    mock = MagicMock(side_effect=[{"return": runner_1_ret}])
    with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}):
        ret = saltmod.parallel_runners(name, runner_1)
        assert ret == expected
    # One failed + one passing runner; comment still names only runner_1.
    expected["changes"]["ret"][runner_2] = runner_2_ret
    mock = MagicMock(side_effect=[{"return": runner_1_ret}, {"return": runner_2_ret}])
    with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}):
        ret = saltmod.parallel_runners(
            name, {runner_1: {"name": name}, runner_2: {"name": name}}
        )
        assert ret == expected
    runner_3 = "runner-3"
    runner_3_ret = {
        "jid": "20170406104341210934",
        "retcode": 1,
        "ret": {
            "test_|-notify_me_|-this is a name 2_|-show_notification": {
                "comment": f"Notify me: {runner_2}",
                "name": "this is a name 2",
                "start_time": "10:43:41.487565",
                "result": False,
                "duration": 0.35,
                "__run_num__": 0,
                "__sls__": "demo",
                "changes": {},
                "__id__": "notify_me",
            }
        },
        "failed": True,
        "out": "highstate",
        "exit_code": 1,
    }
    # Two failed runners: the comment ordering is not deterministic, so the
    # comment is popped and checked piecewise below.
    expected["changes"]["ret"][runner_3] = runner_3_ret
    expected.pop("comment")
    mock = MagicMock(
        side_effect=[
            {"return": runner_1_ret},
            {"return": runner_2_ret},
            {"return": runner_3_ret},
        ]
    )
    with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}):
        ret = saltmod.parallel_runners(
            name,
            {
                runner_1: {"name": name},
                runner_2: {"name": name},
                runner_3: {"name": name},
            },
        )
        ret_comment = ret.pop("comment")
        assert ret == expected
        assert "Runners " in ret_comment
        assert " failed." in ret_comment
        assert runner_1 in ret_comment
        assert runner_3 in ret_comment
        assert runner_2 not in ret_comment

View file

@ -0,0 +1,48 @@
import pytest
import salt.states.saltmod as saltmod
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules(minion_opts):
    """Wire up the saltmod state module dunders for the loader mock."""
    return {
        saltmod: {
            "__env__": "base",
            "__opts__": minion_opts,
        },
    }
def test_test_mode():
    """In test mode the runner is not executed and a 'would be' result comes back."""
    runner_name = "bah"
    with patch.dict(saltmod.__opts__, {"test": True}):
        result = saltmod.runner(runner_name)
    assert result == {
        "name": runner_name,
        "changes": {},
        "result": None,
        "comment": f"Runner function '{runner_name}' would be executed.",
    }
def test_runner():
    """A successful runner call reports its return value and a success comment."""
    runner_mock = MagicMock(return_value={"return": True})
    with patch.dict(saltmod.__salt__, {"saltutil.runner": runner_mock}):
        result = saltmod.runner("state")
    assert result == {
        "changes": {"return": True},
        "name": "state",
        "result": True,
        "comment": "Runner function 'state' executed.",
    }

View file

@ -0,0 +1,980 @@
import pytest
import salt.modules.saltutil
import salt.states.saltmod as saltmod
import salt.utils.event
import salt.utils.jid
import salt.utils.state
from tests.support.mock import MagicMock, create_autospec, patch
@pytest.fixture
def configure_loader_modules(minion_opts):
    """Wire up the saltmod state module dunders for the loader mock."""
    return {
        saltmod: {
            "__opts__": minion_opts,
            # saltutil.cmd is replaced per-test; the real check_result is kept
            # so result evaluation behaves like production.
            "__salt__": {"saltutil.cmd": MagicMock()},
            "__utils__": {"state.check_result": salt.utils.state.check_result},
        },
    }
@pytest.fixture
def fake_cmd():
    """Yield an autospecced saltutil.cmd patched into saltmod.__salt__."""
    _fake_cmd = create_autospec(salt.modules.saltutil.cmd)
    with patch.dict(saltmod.__salt__, {"saltutil.cmd": _fake_cmd}):
        yield _fake_cmd
@pytest.mark.parametrize(
    "exclude",
    [True, False],
)
def test_exclude_parameter_gets_passed(exclude, fake_cmd):
    """
    Ensure that an explicitly supplied ``exclude`` argument is forwarded
    to ``saltutil.cmd`` inside its ``kwarg`` dictionary.
    """
    args = ("webserver_setup", "webserver2")
    expected_exclude = exclude
    kwargs = {
        "tgt_type": "glob",
        "exclude": expected_exclude,
        "highstate": True,
    }
    saltmod.state(*args, **kwargs)
    # call_args[1] is the keyword-argument dict of the saltutil.cmd call.
    call = fake_cmd.call_args[1]
    assert call["kwarg"]["exclude"] == expected_exclude
def test_exclude_parameter_is_not_passed_if_not_provided(fake_cmd):
    """When ``exclude`` is not supplied, it must not appear in the cmd kwarg."""
    saltmod.state("webserver_setup", "webserver2", tgt_type="glob", highstate=True)
    forwarded_kwargs = fake_cmd.call_args[1]
    assert "exclude" not in forwarded_kwargs["kwarg"]
def test_state_smoke_test():
    """
    Smoke test for salt.states.saltmod.state(). Ensures that we
    don't take an exception if optional parameters are not specified in
    __opts__ or __env__.
    """
    args = ("webserver_setup", "webserver2")
    # Every optional keyword at its "unset" value.
    kwargs = {
        "tgt_type": "glob",
        "fail_minions": None,
        "pillar": None,
        "top": None,
        "batch": None,
        "orchestration_jid": None,
        "sls": "vroom",
        "queue": False,
        "concurrent": False,
        "highstate": None,
        "expr_form": None,
        "ret": "",
        "ssh": False,
        "timeout": None,
        "test": False,
        "allow_fail": 0,
        "saltenv": None,
        "expect_minions": False,
    }
    with patch.dict(saltmod.__opts__, {"id": "webserver2"}):
        ret = saltmod.state(*args, **kwargs)
    expected = {
        "comment": "States ran successfully.",
        "changes": {},
        "name": "webserver_setup",
        "result": True,
    }
    assert ret == expected
def test_state():
"""
Test to invoke a state run on a given target
"""
name = "state"
tgt = "minion1"
expected = {
"name": name,
"changes": {},
"result": False,
"comment": "No highstate or sls specified, no execution made",
}
ret = saltmod.state(name, tgt)
assert ret == expected
expected.update({"comment": "Must pass in boolean for value of 'concurrent'"})
ret = saltmod.state(name, tgt, highstate=True, concurrent="a")
assert ret == expected
expected.update(
{
"result": True,
"comment": "States ran successfully.",
}
)
with patch.dict(saltmod.__opts__, {"test": True}):
ret = saltmod.state(name, tgt, highstate=True)
assert ret == expected
silver_ret = {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {"foo": "bar"},
"__id__": "notify_me",
}
}
expected.update(
{
"comment": "States ran successfully. Updating silver.",
"result": None,
"__jid__": "20170406104341210934",
"changes": {
"out": "highstate",
"ret": {"silver": silver_ret},
},
}
)
with patch.dict(saltmod.__opts__, {"test": True}):
mock = MagicMock(
return_value={
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": silver_ret,
"out": "highstate",
}
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True)
assert ret == expected
mock.assert_called_once()
expected.update(
{
"comment": "States ran successfully. No changes made to silver.",
"result": True,
"__jid__": "20170406104341210934",
"changes": {},
}
)
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"out": "highstate",
}
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True)
assert ret == expected
mock.assert_called_once()
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, top="the-top")
assert "arg" in mock.call_args.kwargs
assert "the-top" in mock.call_args.kwargs["arg"]
for pass_kw in ("ret_config", "ret_kwargs", "batch", "subset"):
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
kwargs = {pass_kw: f"{pass_kw}_value"}
ret = saltmod.state(name, tgt, highstate=True, **{pass_kw: kwargs})
assert pass_kw in mock.call_args.kwargs
if pass_kw == "batch":
assert mock.call_args.kwargs[pass_kw] == str(kwargs)
else:
assert mock.call_args.kwargs[pass_kw] == kwargs
assert ret == expected
for pass_kw in ("pillar", "pillarenv", "saltenv"):
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
kwargs = {pass_kw: f"{pass_kw}_value"}
ret = saltmod.state(name, tgt, highstate=True, **{pass_kw: kwargs})
assert "kwarg" in mock.call_args.kwargs
assert pass_kw in mock.call_args.kwargs["kwarg"]
assert mock.call_args.kwargs["kwarg"][pass_kw] == kwargs
assert ret == expected
test_batch_return = {
"minion1": {
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
},
"retcode": 0,
},
"out": "highstate",
},
"minion2": {
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
},
"retcode": 0,
},
"out": "highstate",
},
"minion3": {
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
},
"retcode": 0,
},
"out": "highstate",
},
}
expected.update(
{
"comment": (
"States ran successfully. No changes made to minion1, minion3,"
" minion2."
)
}
)
del expected["__jid__"]
with patch.dict(saltmod.__opts__, {"test": False}):
with patch.dict(
saltmod.__salt__,
{"saltutil.cmd": MagicMock(return_value=test_batch_return)},
):
state_run = saltmod.state(name, tgt, highstate=True)
# Test return without checking the comment contents. Comments are tested later.
comment = state_run.pop("comment")
expected.pop("comment")
assert state_run == expected
# Check the comment contents in a non-order specific way (ordering fails sometimes on PY3)
assert "States ran successfully. No changes made to" in comment
for minion in ["minion1", "minion2", "minion3"]:
assert minion in comment
def test_state_masterless():
    """
    Test to invoke a state run masterless.

    With ``file_client: local`` the state is executed via the local
    ``state.highstate`` / ``state.top`` / ``state.sls`` functions rather
    than ``saltutil.cmd``.
    """
    name = "state"
    minion_id = "masterless-minion"
    expected = {
        "name": name,
        "changes": {},
        "comment": f"States ran successfully. No changes made to {minion_id}.",
        "result": True,
    }
    with patch.dict(
        saltmod.__opts__,
        {"test": False, "__role": "minion", "file_client": "local", "id": minion_id},
    ):
        mock = MagicMock(
            return_value={
                minion_id: {
                    "jid": "20170406104341210934",
                    "retcode": 0,
                    "ret": {
                        "test_|-notify_me_|-this is a name_|-show_notification": {
                            "comment": "Notify me",
                            "name": "this is a name",
                            "start_time": "10:43:41.487565",
                            "result": True,
                            "duration": 0.35,
                            "__run_num__": 0,
                            "__sls__": "demo",
                            "changes": {},
                            "__id__": "notify_me",
                        }
                    },
                    "out": "highstate",
                }
            }
        )
        # highstate maps to state.highstate locally.
        with patch.dict(saltmod.__salt__, {"state.highstate": mock}):
            ret = saltmod.state(name, minion_id, highstate=True)
            assert ret == expected
            mock.assert_called_once()
        # top maps to state.top with the topfn kwarg.
        with patch.dict(saltmod.__salt__, {"state.top": mock}):
            ret = saltmod.state(name, minion_id, top="the-top")
            assert ret == expected
            assert "topfn" in mock.call_args.kwargs
            assert mock.call_args.kwargs["topfn"] == "the-top"
        # sls maps to state.sls with the mods kwarg.
        with patch.dict(saltmod.__salt__, {"state.sls": mock}):
            ret = saltmod.state(name, minion_id, sls="the-sls")
            assert ret == expected
            assert "mods" in mock.call_args.kwargs
            assert mock.call_args.kwargs["mods"] == "the-sls"
        # A list of sls names is joined with commas.
        with patch.dict(saltmod.__salt__, {"state.sls": mock}):
            ret = saltmod.state(name, minion_id, sls=["the-sls-1", "the-sls-2"])
            assert ret == expected
            assert "mods" in mock.call_args.kwargs
            assert mock.call_args.kwargs["mods"] == "the-sls-1,the-sls-2"
def test_state_failhard():
    """``failhard`` reaches saltutil.cmd whether passed explicitly or via __opts__."""
    name = "state"
    tgt = "minion1"
    expected = {
        "name": name,
        "changes": {},
        "comment": "States ran successfully. No changes made to silver.",
        "result": True,
        "__jid__": "20170406104341210934",
    }
    # Case 1: failhard passed explicitly to the state.
    with patch.dict(saltmod.__opts__, {"test": False}):
        mock = MagicMock(
            return_value={
                "silver": {
                    "jid": "20170406104341210934",
                    "retcode": 0,
                    "ret": {
                        "test_|-notify_me_|-this is a name_|-show_notification": {
                            "comment": "Notify me",
                            "name": "this is a name",
                            "start_time": "10:43:41.487565",
                            "result": True,
                            "duration": 0.35,
                            "__run_num__": 0,
                            "__sls__": "demo",
                            "changes": {},
                            "__id__": "notify_me",
                        }
                    },
                    "out": "highstate",
                }
            }
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.state(name, tgt, highstate=True, failhard=True)
            assert ret == expected
            mock.assert_called_once()
            assert "failhard" in mock.call_args.kwargs
            assert mock.call_args.kwargs["failhard"] is True
    # Case 2: failhard inherited from __opts__.
    with patch.dict(saltmod.__opts__, {"test": False, "failhard": True}):
        mock = MagicMock(
            return_value={
                "silver": {
                    "jid": "20170406104341210934",
                    "retcode": 0,
                    "ret": {
                        "test_|-notify_me_|-this is a name_|-show_notification": {
                            "comment": "Notify me",
                            "name": "this is a name",
                            "start_time": "10:43:41.487565",
                            "result": True,
                            "duration": 0.35,
                            "__run_num__": 0,
                            "__sls__": "demo",
                            "changes": {},
                            "__id__": "notify_me",
                        }
                    },
                    "out": "highstate",
                }
            }
        )
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
            ret = saltmod.state(name, tgt, highstate=True)
            assert ret == expected
            mock.assert_called_once()
            assert "failhard" in mock.call_args.kwargs
            assert mock.call_args.kwargs["failhard"] is True
def test_state_no_returns():
    """An empty saltutil.cmd return means no minions responded -> failed state."""
    state_name = "state"
    target = "minion1"
    cmd_mock = MagicMock(return_value={})
    with patch.dict(saltmod.__opts__, {"test": False}):
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": cmd_mock}):
            result = saltmod.state(state_name, target, highstate=True)
            assert result == {
                "name": state_name,
                "changes": {},
                "result": False,
                "comment": "No minions returned",
            }
            cmd_mock.assert_called_once()
def test_state_failed_and_expected_minions():
name = "state"
tgt = "minion1"
expected = {
"name": name,
"changes": {"out": "highstate", "ret": {"silver": False}},
"comment": "Run failed on minions: silver",
"result": False,
"__jid__": "20170406104341210934",
}
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"gold": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"out": "highstate",
},
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True)
assert ret == expected
mock.assert_called_once()
expected.update(
{
"changes": {
"out": "highstate",
"ret": {"bronze": False, "charcoal": False, "silver": False},
},
"comment": "Run failed on minions: silver, bronze",
}
)
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"charcoal": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"bronze": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"gold": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"out": "highstate",
},
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True, fail_minions="charcoal")
ret_comment = ret.pop("comment")
expected.pop("comment")
assert ret == expected
# The order can be different, hence asserting like this
assert "Run failed on minions: " in ret_comment
assert "silver" in ret_comment
assert "bronze" in ret_comment
mock.assert_called_once()
expected.update(
{
"changes": {
"out": "highstate",
"ret": {"bronze": False, "charcoal": False, "silver": False},
},
"comment": "Run failed on minions: silver",
}
)
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"charcoal": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"bronze": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"gold": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"out": "highstate",
},
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(
name, tgt, highstate=True, fail_minions="bronze,charcoal"
)
assert ret == expected
mock.assert_called_once()
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(
name, tgt, highstate=True, fail_minions=["bronze", "charcoal"]
)
assert ret == expected
expected.pop("__jid__")
expected.update(
{
"result": True,
"changes": {},
"comment": "States ran successfully.",
"warnings": [
"'fail_minions' needs to be a list or a comma separated string. Ignored.",
],
}
)
ret = saltmod.state(name, tgt, highstate=True, fail_minions={})
assert ret == expected
def test_state_allow_fail():
name = "state"
tgt = "minion1"
expected = {
"name": name,
"changes": {"out": "highstate", "ret": {"silver": False}},
"comment": "States ran successfully. Updating silver. No changes made to gold.",
"result": True,
"__jid__": "20170406104341210934",
}
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"gold": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"out": "highstate",
},
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True, allow_fail=1)
assert ret == expected
mock.assert_called_once()
gold_ret = {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {"foo": "bar"},
"__id__": "notify_me",
}
}
expected["changes"]["ret"]["gold"] = gold_ret
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"gold": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": gold_ret,
"out": "highstate",
},
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True, allow_fail=1)
ret_comment = ret.pop("comment")
expected.pop("comment")
assert ret == expected
# The order can be different, hence asserting like this
assert "States ran successfully. Updating " in ret_comment
assert "silver" in ret_comment
assert "gold" in ret_comment
mock.assert_called_once()
expected.update(
{
"changes": {
"out": "highstate",
"ret": {"bronze": False, "charcoal": False, "silver": False},
},
"comment": "Run failed on minions: silver, bronze",
"result": False,
}
)
with patch.dict(saltmod.__opts__, {"test": False}):
mock = MagicMock(
return_value={
"charcoal": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"bronze": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"silver": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"failed": True,
"out": "highstate",
},
"gold": {
"jid": "20170406104341210934",
"retcode": 0,
"ret": {
"test_|-notify_me_|-this is a name_|-show_notification": {
"comment": "Notify me",
"name": "this is a name",
"start_time": "10:43:41.487565",
"result": True,
"duration": 0.35,
"__run_num__": 0,
"__sls__": "demo",
"changes": {},
"__id__": "notify_me",
}
},
"out": "highstate",
},
}
)
with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
ret = saltmod.state(name, tgt, highstate=True, allow_fail=1)
ret_comment = ret.pop("comment")
expected.pop("comment")
assert ret == expected
# The order can be different, hence asserting like this
assert "Run failed on minions: " in ret_comment
assert "silver" in ret_comment
assert "bronze" in ret_comment
mock.assert_called_once()
expected = {
"name": name,
"changes": {},
"result": False,
"comment": "Passed invalid value for 'allow_fail', must be an int",
}
ret = saltmod.state(name, tgt, allow_fail="a")
assert ret == expected
def test_roster():
    """
    The ``roster`` argument must be forwarded to saltutil.cmd for salt-ssh runs.
    """
    fake_cmd = MagicMock()
    with patch.dict(saltmod.__salt__, {"saltutil.cmd": fake_cmd}):
        saltmod.state(
            "state.sls", tgt="*", ssh=True, highstate=True, roster="my_roster"
        )
    assert fake_cmd.call_args.kwargs.get("roster") == "my_roster"

View file

@ -0,0 +1,131 @@
import copy
import pytest
import salt.states.saltmod as saltmod
import salt.utils.state
from tests.support.mock import MagicMock, patch
class MockedEvent:
    """
    Minimal stand-in for a Salt event source.

    ``get_event`` returns the stored payload only once ``flag`` has been
    set truthy by the test; until then it returns None, letting tests
    drive the timeout path first and the success path afterwards.
    """
    def __init__(self, data):
        # Records the last ``full`` argument seen by get_event.
        self.full = None
        # Gate: tests flip this to True to start emitting events.
        self.flag = None
        self._data = data
    def get_event(self, full):
        """
        Return the stored event payload if the gate is open, else None.
        """
        self.full = full
        if self.flag:
            return self._data
        return None
    def __enter__(self):
        # Support use as a context manager, like the real event object.
        return self
    def __exit__(self, *args):
        pass
@pytest.fixture
def configure_loader_modules(minion_opts):
    """Wire up the saltmod state module dunders for the loader mock."""
    return {
        saltmod: {
            "__opts__": minion_opts,
        },
    }
def test_test_mode():
    """In test mode wait_for_event does not block and returns a 'would wait' result."""
    event_name = "presence"
    lost_id = "lost"
    minions = ["minion_1", "minion_2", "minion_3"]
    with patch.dict(saltmod.__opts__, {"test": True}):
        result = saltmod.wait_for_event(
            event_name, minions, event_id=lost_id, timeout=-1.0
        )
    assert result == {
        "name": event_name,
        "changes": {},
        "result": None,
        "comment": f"Orchestration would wait for event '{event_name}'",
    }
def test_wait_for_event():
    """
    Test to watch Salt's event bus and block until a condition is met.

    time.time is pinned so the timeout math is deterministic: a negative
    timeout expires immediately unless the mocked event fires.
    """
    name = "state"
    tgt = "minion1"
    ret = {
        "name": name,
        "changes": {},
        "result": False,
        "comment": "Timeout value reached.",
    }
    mocked_event = MockedEvent({"tag": name, "data": {}})
    with patch.object(
        salt.utils.event, "get_event", MagicMock(return_value=mocked_event)
    ):
        with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}):
            with patch("salt.states.saltmod.time.time", MagicMock(return_value=1.0)):
                # No event yet -> negative timeout expires immediately.
                assert saltmod.wait_for_event(name, "salt", timeout=-1.0) == ret
                # Open the gate: the event is now seen.
                mocked_event.flag = True
                ret.update(
                    {"comment": "All events seen in 0.0 seconds.", "result": True}
                )
                assert saltmod.wait_for_event(name, "") == ret
                # With a real target list the event payload does not satisfy
                # the condition, so the timeout path is taken again.
                ret.update({"comment": "Timeout value reached.", "result": False})
                assert saltmod.wait_for_event(name, tgt, timeout=-1.0) == ret
def test_list_single_event():
    """
    Test to watch Salt's event bus and block until a condition is met.

    Uses an ``event_id`` whose payload lists minions; all targeted
    minions must be seen before the state succeeds.
    """
    name = "presence"
    event_id = "lost"
    tgt = ["minion_1", "minion_2", "minion_3"]
    expected = {
        "name": name,
        "changes": {},
        "result": False,
        "comment": "Timeout value reached.",
    }
    mocked_event = MockedEvent({"tag": name, "data": {"lost": tgt}})
    with patch.object(
        salt.utils.event, "get_event", MagicMock(return_value=mocked_event)
    ):
        with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}):
            with patch("salt.states.saltmod.time.time", MagicMock(return_value=1.0)):
                # Gate closed -> no event delivered -> timeout.
                expected.update({"comment": "Timeout value reached.", "result": False})
                ret = saltmod.wait_for_event(name, tgt, event_id=event_id, timeout=-1.0)
                assert ret == expected
                # Gate open: all minions appear in the event payload.
                mocked_event.flag = True
                expected.update(
                    {
                        "name": name,
                        "changes": {"minions_seen": tgt},
                        "result": True,
                        "comment": "All events seen in 0.0 seconds.",
                    }
                )
                # deepcopy because wait_for_event mutates the tgt list as
                # minions are seen.
                ret = saltmod.wait_for_event(
                    name, copy.deepcopy(tgt), event_id="lost", timeout=1.0
                )
                assert ret == expected

View file

@ -0,0 +1,68 @@
import pytest
import salt.states.saltmod as saltmod
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules(minion_opts):
    """Wire up the saltmod state module dunders for the loader mock."""
    return {
        saltmod: {
            "__env__": "base",
            "__opts__": minion_opts,
        },
    }
def test_test_mode():
    """In test mode the wheel function is not executed; a 'would be' result returns."""
    wheel_name = "bah"
    with patch.dict(saltmod.__opts__, {"test": True}):
        result = saltmod.wheel(wheel_name)
    assert result == {
        "name": wheel_name,
        "changes": {},
        "result": None,
        "comment": f"Wheel function '{wheel_name}' would be executed.",
    }
def test_wheel():
    """A successful wheel call reports its return value and a success comment."""
    wheel_mock = MagicMock(return_value={"return": True})
    with patch.dict(saltmod.__salt__, {"saltutil.wheel": wheel_mock}):
        result = saltmod.wheel("state")
        assert result == {
            "changes": {"return": True},
            "name": "state",
            "result": True,
            "comment": "Wheel function 'state' executed.",
        }
def test_test_error_in_return():
    """A wheel return containing an 'Error' key is reported as a failure."""
    name = "bah"
    jid = "20170406104341210934"
    func_ret = {"Error": "This is an Error!"}
    expected = {
        "name": name,
        "changes": {"return": func_ret},
        "result": False,
        "comment": f"Wheel function '{name}' failed.",
        "__jid__": jid,
    }
    mock = MagicMock(return_value={"return": func_ret, "jid": jid})
    with patch.dict(saltmod.__salt__, {"saltutil.wheel": mock}):
        ret = saltmod.wheel(name)
        assert ret == expected
        mock.assert_called_once()

View file

@ -1,55 +0,0 @@
import pytest
import salt.modules.saltutil as saltutil
import salt.states.saltmod as saltmod
from tests.support.mock import create_autospec, patch
@pytest.fixture
def configure_loader_modules():
    """Provide minimal __opts__ for the saltmod state module."""
    return {saltmod: {"__opts__": {"__role": "testsuite"}}}
@pytest.fixture
def fake_cmd():
    """Yield an autospec'd stand-in for saltutil.cmd so tests can inspect calls."""
    mocked_cmd = create_autospec(saltutil.cmd)
    with patch.dict(saltmod.__salt__, {"saltutil.cmd": mocked_cmd}):
        yield mocked_cmd
@pytest.mark.parametrize(
    "exclude",
    [True, False],
)
def test_exclude_parameter_gets_passed(exclude, fake_cmd):
    """
    The ``exclude`` keyword given to saltmod.state() must be forwarded
    verbatim inside the ``kwarg`` payload of the saltutil.cmd call.
    """
    saltmod.state(
        "webserver_setup",
        "webserver2",
        tgt_type="glob",
        exclude=exclude,
        highstate=True,
    )
    passed_kwargs = fake_cmd.call_args[1]
    assert passed_kwargs["kwarg"]["exclude"] == exclude
def test_exclude_parameter_is_not_passed_if_not_provided(fake_cmd):
    """When ``exclude`` is omitted it must not leak into the kwarg payload."""
    saltmod.state(
        "webserver_setup",
        "webserver2",
        tgt_type="glob",
        highstate=True,
    )
    passed_kwargs = fake_cmd.call_args[1]
    assert "exclude" not in passed_kwargs["kwarg"]

View file

@ -1,70 +0,0 @@
import os
import pytest
import salt.utils.verify
from tests.support.mock import patch
@pytest.mark.skip_on_windows(reason="Not applicable for Windows.")
def test_verify_env_race_condition():
    """
    verify_env() must tolerate paths vanishing between directory listing and
    stat/chown (a TOCTOU race, surfaced as FileNotFoundError) without raising,
    while still processing the paths that remain.
    """
    def _stat(path):
        """
        Helper function for mock_stat, we want to raise errors for specific paths, but not until we get into the proper path.
        Until then, just return plain os.stat_result
        """
        # Hidden entries must be skipped entirely by verify_env — reaching
        # them here means the skip logic regressed.
        if path in ("/tmp/salt-dir/.file3", "/tmp/salt-dir/.dir3"):
            raise AssertionError("The .file3 and .dir3 paths should never be called!")
        # Simulate the race: these paths "disappear" before stat.
        if path in ("/tmp/salt-dir/file1", "/tmp/salt-dir/dir1"):
            raise FileNotFoundError(
                "[Errno 2] No such file or directory: this exception should not be visible"
            )
        # we need to return at least different st_uid in order to trigger chown for these paths
        if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"):
            return os.stat_result([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        return os.stat_result([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    def _chown(path, uid, gid):
        # Second half of the race: the path disappears between stat and chown.
        if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"):
            raise FileNotFoundError(
                "[Errno 2] No such file or directory: this exception should not be visible"
            )
        return
    with patch("os.chown", side_effect=_chown) as mock_chown, patch(
        "os.stat", side_effect=_stat
    ) as mock_stat, patch(
        "salt.utils.verify._get_pwnam", return_value=(None, None, 0, 0)
    ), patch(
        "os.getuid", return_value=0
    ), patch(
        "os.listdir", return_value=["subdir"]
    ), patch(
        "os.path.isdir", return_value=True
    ), patch(
        "salt.utils.path.os_walk",
        return_value=[
            (
                "/tmp/salt-dir",
                ["dir1", "dir2", ".dir3", "dir4"],
                ["file1", "file2", ".file3", "file4"],
            )
        ],
    ):
        # verify this runs without issues, even though FNFE is raised
        salt.utils.verify.verify_env(["/tmp/salt-dir"], "root", skip_extra=True)
        # and verify it got actually called with the valid paths
        mock_stat.assert_any_call("/tmp/salt-dir/file1")
        mock_stat.assert_any_call("/tmp/salt-dir/dir1")
        mock_stat.assert_any_call("/tmp/salt-dir/file4")
        mock_stat.assert_any_call("/tmp/salt-dir/dir4")
        mock_chown.assert_any_call("/tmp/salt-dir/file4", 0, 0)
        mock_chown.assert_any_call("/tmp/salt-dir/dir4", 0, 0)

View file

@ -0,0 +1,17 @@
"""
salt.utils.clean_path works as expected
"""
import salt.utils.verify
def test_clean_path_valid(tmp_path):
    """A candidate path inside the root is returned unchanged."""
    root = str(tmp_path / "foo")
    candidate = str(tmp_path / "foo" / "bar")
    assert salt.utils.verify.clean_path(root, candidate) == candidate
def test_clean_path_invalid(tmp_path):
    """A candidate path outside the root is rejected with an empty string."""
    root = str(tmp_path / "foo")
    candidate = str(tmp_path / "baz" / "bar")
    assert salt.utils.verify.clean_path(root, candidate) == ""

View file

@ -0,0 +1,66 @@
"""
Ensure salt.utils.clean_path works with symlinked directories and files
"""
import ctypes
import pytest
import salt.utils.verify
class Symlink:
    """
    symlink(source, link_name) Creates a symbolic link pointing to source named
    link_name

    Windows-only helper: calls the Win32 CreateSymbolicLinkW API directly via
    ctypes, lazily binding the function pointer on first use.
    """
    def __init__(self):
        # Cached ctypes handle to CreateSymbolicLinkW; bound lazily in __call__.
        self._csl = None
    def __call__(self, source, link_name):
        if self._csl is None:
            self._csl = ctypes.windll.kernel32.CreateSymbolicLinkW
            self._csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
            self._csl.restype = ctypes.c_ubyte
        # flags=1 requests a directory symlink (SYMBOLIC_LINK_FLAG_DIRECTORY).
        flags = 0
        if source is not None and source.is_dir():
            flags = 1
        # CreateSymbolicLinkW returns 0 on failure; surface it as OSError.
        if self._csl(str(link_name), str(source), flags) == 0:
            raise ctypes.WinError()
@pytest.fixture(scope="module")
def symlink():
    """Module-scoped callable that creates symlinks through the Win32 API."""
    return Symlink()
@pytest.fixture
def setup_links(tmp_path, symlink):
    """Create tmp_path/linkfrom -> tmp_path/linkto and return (target, link)."""
    link_target = tmp_path / "linkto"
    link_source = tmp_path / "linkfrom"
    if salt.utils.platform.is_windows():
        # pathlib symlinks need elevated rights on Windows; use the Win32 helper.
        symlink(link_target, link_source)
    else:
        link_source.symlink_to(link_target, target_is_directory=True)
    return link_target, link_source
def test_clean_path_symlinked_src(setup_links):
    """clean_path() on a symlinked root resolves to the link target path."""
    to_path, from_path = setup_links
    expect_path = str(to_path / "test")
    ret = salt.utils.verify.clean_path(str(from_path), str(from_path / "test"))
    assert ret == expect_path, f"{ret} is not {expect_path}"
def test_clean_path_symlinked_tgt(setup_links):
    """clean_path() accepts a path already under the resolved link target."""
    to_path, from_path = setup_links
    expect_path = str(to_path / "test")
    ret = salt.utils.verify.clean_path(str(from_path), str(to_path / "test"))
    assert ret == expect_path, f"{ret} is not {expect_path}"

View file

@ -0,0 +1,297 @@
import getpass
import logging
import os
import pathlib
import socket
import stat
import sys
import tempfile
import pytest
import salt.utils.files
import salt.utils.verify
from tests.support.mock import patch
if sys.platform.startswith("win"):
import win32file
else:
import resource
log = logging.getLogger(__name__)
@pytest.mark.skip_on_windows(reason="Not applicable for Windows.")
def test_verify_env_race_condition():
    """
    verify_env() must tolerate paths vanishing between directory listing and
    stat/chown (a TOCTOU race, surfaced as FileNotFoundError) without raising,
    while still processing the paths that remain.
    """
    def _stat(path):
        """
        Helper function for mock_stat, we want to raise errors for specific paths, but not until we get into the proper path.
        Until then, just return plain os.stat_result
        """
        # Hidden entries must be skipped entirely by verify_env — reaching
        # them here means the skip logic regressed.
        if path in ("/tmp/salt-dir/.file3", "/tmp/salt-dir/.dir3"):
            raise AssertionError("The .file3 and .dir3 paths should never be called!")
        # Simulate the race: these paths "disappear" before stat.
        if path in ("/tmp/salt-dir/file1", "/tmp/salt-dir/dir1"):
            raise FileNotFoundError(
                "[Errno 2] No such file or directory: this exception should not be visible"
            )
        # we need to return at least different st_uid in order to trigger chown for these paths
        if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"):
            return os.stat_result([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
        return os.stat_result([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    def _chown(path, uid, gid):
        # Second half of the race: the path disappears between stat and chown.
        if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"):
            raise FileNotFoundError(
                "[Errno 2] No such file or directory: this exception should not be visible"
            )
        return
    with patch("os.chown", side_effect=_chown) as mock_chown, patch(
        "os.stat", side_effect=_stat
    ) as mock_stat, patch(
        "salt.utils.verify._get_pwnam", return_value=(None, None, 0, 0)
    ), patch(
        "os.getuid", return_value=0
    ), patch(
        "os.listdir", return_value=["subdir"]
    ), patch(
        "os.path.isdir", return_value=True
    ), patch(
        "salt.utils.path.os_walk",
        return_value=[
            (
                "/tmp/salt-dir",
                ["dir1", "dir2", ".dir3", "dir4"],
                ["file1", "file2", ".file3", "file4"],
            )
        ],
    ):
        # verify this runs without issues, even though FNFE is raised
        salt.utils.verify.verify_env(["/tmp/salt-dir"], "root", skip_extra=True)
        # and verify it got actually called with the valid paths
        mock_stat.assert_any_call("/tmp/salt-dir/file1")
        mock_stat.assert_any_call("/tmp/salt-dir/dir1")
        mock_stat.assert_any_call("/tmp/salt-dir/file4")
        mock_stat.assert_any_call("/tmp/salt-dir/dir4")
        mock_chown.assert_any_call("/tmp/salt-dir/file4", 0, 0)
        mock_chown.assert_any_call("/tmp/salt-dir/dir4", 0, 0)
def test_valid_id_exception_handler():
    """
    Ensure we just return False if we pass in invalid or undefined paths.
    Refs #8259
    """
    opts = {"pki_dir": "/tmp/whatever"}
    result = salt.utils.verify.valid_id(opts, None)
    assert not result
def test_valid_id_pathsep():
    """
    An id containing a path separator must be rejected as invalid.
    """
    opts = {"pki_dir": "/tmp/whatever"}
    # Both separators matter: os.path.normpath rewrites "/" to "\\" on Windows.
    for sep in ("/", "\\"):
        bad_id = sep.join(("..", "foobar"))
        assert not salt.utils.verify.valid_id(opts, bad_id)
def test_zmq_verify():
    """The installed pyzmq must satisfy Salt's minimum version check."""
    result = salt.utils.verify.zmq_version()
    assert result
def test_zmq_verify_insufficient():
    """A pyzmq older than the supported minimum must fail the version check."""
    import zmq

    with patch.object(zmq, "__version__", "2.1.0"):
        result = salt.utils.verify.zmq_version()
        assert not result
def test_user():
    """check_user() succeeds for the user running the test suite."""
    current_user = getpass.getuser()
    assert salt.utils.verify.check_user(current_user)
def test_no_user():
    """
    check_user() for a nonexistent user must fail: on Windows it returns True
    (no user check performed there), elsewhere it exits the process after
    writing a CRITICAL message to stderr.
    """
    # Catch sys.stderr here since no logging is configured and
    # check_user WILL write to sys.stderr
    class FakeWriter:
        def __init__(self):
            self.output = ""
            # NOTE(review): "errors" presumably mimics the io stream attribute
            # some writers/handlers inspect — confirm which consumer needs it.
            self.errors = "strict"
        def write(self, data):
            self.output += data
        def flush(self):
            pass
    stderr = sys.stderr
    writer = FakeWriter()
    sys.stderr = writer
    try:
        # Now run the test
        if sys.platform.startswith("win"):
            assert salt.utils.verify.check_user("nouser")
        else:
            # check_user exits with SystemExit for an unknown user on POSIX.
            with pytest.raises(SystemExit):
                assert not salt.utils.verify.check_user("nouser")
    finally:
        # Restore sys.stderr
        sys.stderr = stderr
    if writer.output != 'CRITICAL: User not found: "nouser"\n':
        # If there's a different error catch, write it to sys.stderr
        sys.stderr.write(writer.output)
@pytest.mark.skip_on_windows(reason="No verify_env Windows")
def test_verify_env(tmp_path):
    """
    verify_env() must create the requested directory owned by the given user,
    with full owner permissions and restricted group/other bits.
    """
    root_dir = tmp_path / "root"
    var_dir = root_dir / "var" / "log" / "salt"
    # Fix: dropped the unused ``key_dir`` local that was assigned and never used.
    salt.utils.verify.verify_env([var_dir], getpass.getuser(), root_dir=root_dir)
    assert var_dir.exists()
    dir_stat = os.stat(var_dir)
    assert dir_stat.st_uid == os.getuid()
    assert dir_stat.st_mode & stat.S_IRWXU == stat.S_IRWXU
    # NOTE(review): 40 (0o50, group r-x) and 5 (0o5, other r-x) are decimal
    # literals compared against octal-style mode masks — presumably intentional,
    # but worth confirming against verify_env's chmod values.
    assert dir_stat.st_mode & stat.S_IRWXG == 40
    assert dir_stat.st_mode & stat.S_IRWXO == 5
@pytest.mark.requires_network(only_local_network=True)
def test_verify_socket():
    """verify_socket() can bind the two test ports on IPv4, and IPv6 when built in."""
    assert salt.utils.verify.verify_socket("", 18000, 18001)
    if not socket.has_ipv6:
        # Only run the IPv6 check if Python is built with IPv6 support;
        # otherwise it would just fail.
        return
    try:
        assert salt.utils.verify.verify_socket("::", 18000, 18001)
    except OSError:
        # Python has IPv6 enabled, but the system cannot create
        # IPv6 sockets (otherwise the test would return a bool)
        # - skip the test
        #
        # FIXME - possibly emit a message that the system does
        # not support IPv6.
        pass
def test_max_open_files(caplog):
    """
    check_max_open_files() must escalate its log level (none -> INFO ->
    WARNING -> CRITICAL) as the number of accepted minion keys approaches the
    soft open-files limit. The soft limit is temporarily lowered to 256 and
    always restored in the finally block.
    """
    with caplog.at_level(logging.DEBUG):
        # NOTE(review): ``recorded_logs`` is assigned but never used — the
        # assertions below read caplog.messages directly; candidate for removal.
        recorded_logs = caplog.record_tuples
        logmsg_dbg = "This salt-master instance has accepted {0} minion keys."
        logmsg_chk = (
            "The number of accepted minion keys({}) should be lower "
            "than 1/4 of the max open files soft setting({}). According "
            "to the system's hard limit, there's still a margin of {} "
            "to raise the salt's max_open_files setting. Please consider "
            "raising this value."
        )
        logmsg_crash = (
            "The number of accepted minion keys({}) should be lower "
            "than 1/4 of the max open files soft setting({}). "
            "salt-master will crash pretty soon! According to the "
            "system's hard limit, there's still a margin of {} to "
            "raise the salt's max_open_files setting. Please consider "
            "raising this value."
        )
        if sys.platform.startswith("win"):
            # Windows has no separate hard limit, so the message omits the margin.
            logmsg_crash = (
                "The number of accepted minion keys({}) should be lower "
                "than 1/4 of the max open files soft setting({}). "
                "salt-master will crash pretty soon! Please consider "
                "raising this value."
            )
        if sys.platform.startswith("win"):
            # Check the Windows API for more detail on this
            # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
            # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
            mof_s = mof_h = win32file._getmaxstdio()
        else:
            mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
        tempdir = tempfile.mkdtemp(prefix="fake-keys")
        keys_dir = pathlib.Path(tempdir, "minions")
        keys_dir.mkdir()
        # Lower the soft limit so the thresholds below are reachable quickly.
        mof_test = 256
        if sys.platform.startswith("win"):
            win32file._setmaxstdio(mof_test)
        else:
            resource.setrlimit(resource.RLIMIT_NOFILE, (mof_test, mof_h))
        try:
            prev = 0
            # Each (newmax, level) pair crosses one escalation threshold.
            for newmax, level in (
                (24, None),
                (66, "INFO"),
                (127, "WARNING"),
                (196, "CRITICAL"),
            ):
                for n in range(prev, newmax):
                    kpath = pathlib.Path(keys_dir, str(n))
                    with salt.utils.files.fopen(kpath, "w") as fp_:
                        fp_.write(str(n))
                opts = {"max_open_files": newmax, "pki_dir": tempdir}
                salt.utils.verify.check_max_open_files(opts)
                if level is None:
                    # No log message is triggered, only the DEBUG one which
                    # tells us how many minion keys were accepted.
                    assert [logmsg_dbg.format(newmax)] == caplog.messages
                else:
                    assert logmsg_dbg.format(newmax) in caplog.messages
                    assert (
                        logmsg_chk.format(
                            newmax,
                            mof_test,
                            mof_test - newmax
                            if sys.platform.startswith("win")
                            else mof_h - newmax,
                        )
                        in caplog.messages
                    )
                prev = newmax
            # Final step: fill up to the soft limit itself -> crash warning.
            newmax = mof_test
            for n in range(prev, newmax):
                kpath = pathlib.Path(keys_dir, str(n))
                with salt.utils.files.fopen(kpath, "w") as fp_:
                    fp_.write(str(n))
            opts = {"max_open_files": newmax, "pki_dir": tempdir}
            salt.utils.verify.check_max_open_files(opts)
            assert logmsg_dbg.format(newmax) in caplog.messages
            assert (
                logmsg_crash.format(
                    newmax,
                    mof_test,
                    mof_test - newmax
                    if sys.platform.startswith("win")
                    else mof_h - newmax,
                )
                in caplog.messages
            )
        except OSError as err:
            if err.errno == 24:
                # Too many open files
                pytest.skip("We've hit the max open files setting")
            raise
        finally:
            # Always restore the original limits for the rest of the suite.
            if sys.platform.startswith("win"):
                win32file._setmaxstdio(mof_h)
            else:
                resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h))

View file

@ -0,0 +1,76 @@
import getpass
import os
import pathlib
import pytest
import salt.utils.verify
from tests.support.mock import MagicMock, patch
def test_verify_logs_filter():
    """Only plain filesystem paths survive the log-destination filter."""
    candidates = ["udp://foo", "tcp://bar", "/tmp/foo", "file://tmp/bar"]
    filtered = salt.utils.verify.verify_logs_filter(candidates)
    assert filtered == ["/tmp/foo"], filtered
@pytest.mark.skip_on_windows(reason="Not applicable on Windows")
def test_verify_log_files_udp_scheme():
    """A udp:// log target must not create a literal 'udp:' directory."""
    salt.utils.verify.verify_log_files(["udp://foo"], getpass.getuser())
    udp_artifact = pathlib.Path(os.getcwd(), "udp:")
    assert not udp_artifact.is_dir()
@pytest.mark.skip_on_windows(reason="Not applicable on Windows")
def test_verify_log_files_tcp_scheme():
    """
    A tcp:// log target must not create a literal 'tcp:' directory.

    Fix: this test previously passed a ``udp://`` URL (copy-paste from the
    udp test), which made the ``tcp:`` assertion vacuous — nothing could
    have created that directory. It now actually exercises the tcp scheme.
    """
    salt.utils.verify.verify_log_files(["tcp://foo"], getpass.getuser())
    assert not pathlib.Path(os.getcwd(), "tcp:").is_dir()
@pytest.mark.skip_on_windows(reason="Not applicable on Windows")
def test_verify_log_files_file_scheme():
    """A file:// log target must not create a literal 'file:' directory."""
    salt.utils.verify.verify_log_files(["file://{}"], getpass.getuser())
    file_artifact = pathlib.Path(os.getcwd(), "file:")
    assert not file_artifact.is_dir()
@pytest.mark.skip_on_windows(reason="Not applicable on Windows")
def test_verify_log_files(tmp_path):
    """verify_log_files() creates a missing log file (and its parent dirs)."""
    log_file = tmp_path / "foo" / "bar.log"
    assert not log_file.exists()
    salt.utils.verify.verify_log_files([str(log_file)], getpass.getuser())
    assert log_file.exists()
def test_verify_log():
    """
    Test that verify_log works as expected
    """
    message = "Insecure logging configuration detected! Sensitive data may be logged."
    # Each of these configurations is insecure (unknown level, trace, or no
    # level at all) and must trigger exactly one warning.
    for insecure_opts in ({"log_level": "cheeseshop"}, {"log_level": "trace"}, {}):
        warn_mock = MagicMock()
        with patch.object(salt.utils.verify.log, "warning", warn_mock):
            salt.utils.verify.verify_log(insecure_opts)
        warn_mock.assert_called_once_with(message)
    # "info" is a secure level: no warning may be emitted.
    info_mock = MagicMock()
    with patch.object(salt.utils.verify.log, "warning", info_mock):
        salt.utils.verify.verify_log({"log_level": "info"})
    assert info_mock.call_count == 0
def test_insecure_log():
    """
    test insecure_log that it returns accurate insecure log levels
    """
    insecure_levels = salt.utils.verify.insecure_log()
    assert insecure_levels == ["all", "debug", "garbage", "profile", "trace"]

View file

@ -1,516 +0,0 @@
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
import copy
import os
import tempfile
import pytest
import salt.config
import salt.loader
import salt.states.saltmod as saltmod
import salt.utils.event
import salt.utils.jid
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase
class SaltmodTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.states.saltmod

    Legacy unittest-style suite: the loader dunders (__opts__, __salt__, etc.)
    are injected via LoaderModuleMockMixin, and saltutil.cmd/runner/wheel are
    mocked per-test.
    """
    def setup_loader_modules(self):
        utils = salt.loader.utils(
            salt.config.DEFAULT_MINION_OPTS.copy(), whitelist=["state"]
        )
        return {
            saltmod: {
                "__env__": "base",
                "__opts__": {
                    "__role": "master",
                    "file_client": "remote",
                    "sock_dir": tempfile.mkdtemp(dir=RUNTIME_VARS.TMP),
                    "transport": "tcp",
                },
                "__salt__": {"saltutil.cmd": MagicMock()},
                "__orchestration_jid__": salt.utils.jid.gen_jid({}),
                "__utils__": utils,
            }
        }
    # 'state' function tests: 1
    @pytest.mark.slow_test
    def test_state(self):
        """
        Test to invoke a state run on a given target
        """
        name = "state"
        tgt = "minion1"
        comt = "Passed invalid value for 'allow_fail', must be an int"
        ret = {"name": name, "changes": {}, "result": False, "comment": comt}
        test_ret = {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "States ran successfully.",
        }
        # Simulated saltutil.cmd batch return: three minions, each reporting
        # one successful state with no changes.
        test_batch_return = {
            "minion1": {
                "ret": {
                    "test_|-notify_me_|-this is a name_|-show_notification": {
                        "comment": "Notify me",
                        "name": "this is a name",
                        "start_time": "10:43:41.487565",
                        "result": True,
                        "duration": 0.35,
                        "__run_num__": 0,
                        "__sls__": "demo",
                        "changes": {},
                        "__id__": "notify_me",
                    },
                    "retcode": 0,
                },
                "out": "highstate",
            },
            "minion2": {
                "ret": {
                    "test_|-notify_me_|-this is a name_|-show_notification": {
                        "comment": "Notify me",
                        "name": "this is a name",
                        "start_time": "10:43:41.487565",
                        "result": True,
                        "duration": 0.35,
                        "__run_num__": 0,
                        "__sls__": "demo",
                        "changes": {},
                        "__id__": "notify_me",
                    },
                    "retcode": 0,
                },
                "out": "highstate",
            },
            "minion3": {
                "ret": {
                    "test_|-notify_me_|-this is a name_|-show_notification": {
                        "comment": "Notify me",
                        "name": "this is a name",
                        "start_time": "10:43:41.487565",
                        "result": True,
                        "duration": 0.35,
                        "__run_num__": 0,
                        "__sls__": "demo",
                        "changes": {},
                        "__id__": "notify_me",
                    },
                    "retcode": 0,
                },
                "out": "highstate",
            },
        }
        # Invalid argument validation paths.
        self.assertDictEqual(saltmod.state(name, tgt, allow_fail="a"), ret)
        comt = "No highstate or sls specified, no execution made"
        ret.update({"comment": comt})
        self.assertDictEqual(saltmod.state(name, tgt), ret)
        comt = "Must pass in boolean for value of 'concurrent'"
        ret.update({"comment": comt})
        self.assertDictEqual(
            saltmod.state(name, tgt, highstate=True, concurrent="a"), ret
        )
        # NOTE(review): this ret.update is not asserted against — the next
        # assertion compares to test_ret instead; presumably leftover.
        ret.update({"comment": comt, "result": None})
        with patch.dict(saltmod.__opts__, {"test": True}):
            self.assertDictEqual(saltmod.state(name, tgt, highstate=True), test_ret)
        ret.update(
            {
                "comment": "States ran successfully. No changes made to silver.",
                "result": True,
                "__jid__": "20170406104341210934",
            }
        )
        with patch.dict(saltmod.__opts__, {"test": False}):
            mock = MagicMock(
                return_value={
                    "silver": {
                        "jid": "20170406104341210934",
                        "retcode": 0,
                        "ret": {
                            "test_|-notify_me_|-this is a name_|-show_notification": {
                                "comment": "Notify me",
                                "name": "this is a name",
                                "start_time": "10:43:41.487565",
                                "result": True,
                                "duration": 0.35,
                                "__run_num__": 0,
                                "__sls__": "demo",
                                "changes": {},
                                "__id__": "notify_me",
                            }
                        },
                        "out": "highstate",
                    }
                }
            )
            with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}):
                self.assertDictEqual(saltmod.state(name, tgt, highstate=True), ret)
        ret.update(
            {
                "comment": (
                    "States ran successfully. No changes made to minion1, minion3,"
                    " minion2."
                )
            }
        )
        del ret["__jid__"]
        with patch.dict(saltmod.__opts__, {"test": False}):
            with patch.dict(
                saltmod.__salt__,
                {"saltutil.cmd": MagicMock(return_value=test_batch_return)},
            ):
                state_run = saltmod.state(name, tgt, highstate=True)
                # Test return without checking the comment contents. Comments are tested later.
                comment = state_run.pop("comment")
                ret.pop("comment")
                self.assertDictEqual(state_run, ret)
                # Check the comment contents in a non-order specific way (ordering fails sometimes on PY3)
                self.assertIn("States ran successfully. No changes made to", comment)
                for minion in ["minion1", "minion2", "minion3"]:
                    self.assertIn(minion, comment)
    # 'function' function tests: 1
    @pytest.mark.slow_test
    def test_function(self):
        """
        Test to execute a single module function on a remote
        minion via salt or salt-ssh
        """
        name = "state"
        tgt = "larry"
        ret = {
            "name": name,
            "changes": {},
            "result": None,
            "comment": "Function state would be executed on target {}".format(tgt),
        }
        with patch.dict(saltmod.__opts__, {"test": True}):
            self.assertDictEqual(saltmod.function(name, tgt), ret)
        ret.update(
            {
                "result": True,
                "changes": {"ret": {tgt: ""}},
                "comment": (
                    "Function ran successfully. Function state ran on {}.".format(tgt)
                ),
            }
        )
        with patch.dict(saltmod.__opts__, {"test": False}):
            mock_ret = {"larry": {"ret": "", "retcode": 0, "failed": False}}
            mock_cmd = MagicMock(return_value=mock_ret)
            with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock_cmd}):
                self.assertDictEqual(saltmod.function(name, tgt), ret)
    @pytest.mark.slow_test
    def test_function_when_no_minions_match(self):
        """
        Test to execute a single module function on a remote
        minion via salt or salt-ssh
        """
        name = "state"
        tgt = "larry"
        # Empty return from saltutil.cmd simulates no minions matching the target.
        mock_ret = {}
        mock_cmd = MagicMock(return_value=mock_ret)
        ret = {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "No minions responded",
        }
        with patch.dict(saltmod.__opts__, {"test": False}):
            with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock_cmd}):
                self.assertDictEqual(saltmod.function(name, tgt), ret)
    # 'wait_for_event' function tests: 1
    def test_wait_for_event(self):
        """
        Test to watch Salt's event bus and block until a condition is met
        """
        name = "state"
        tgt = "minion1"
        comt = "Timeout value reached."
        ret = {"name": name, "changes": {}, "result": False, "comment": comt}
        class Mockevent:
            """
            Mock event class
            """
            # Class-level flag: toggled by the test to make get_event start
            # returning a matching event instead of None.
            flag = None
            def __init__(self):
                self.full = None
            def get_event(self, full):
                """
                Mock get_event method
                """
                self.full = full
                if self.flag:
                    return {"tag": name, "data": {}}
                return None
            def __enter__(self):
                return self
            def __exit__(self, *args):
                pass
        with patch.object(
            salt.utils.event, "get_event", MagicMock(return_value=Mockevent())
        ):
            with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}):
                # time.time pinned to 1.0 so the timeout math is deterministic.
                with patch(
                    "salt.states.saltmod.time.time", MagicMock(return_value=1.0)
                ):
                    self.assertDictEqual(
                        saltmod.wait_for_event(name, "salt", timeout=-1.0), ret
                    )
                    Mockevent.flag = True
                    ret.update(
                        {"comment": "All events seen in 0.0 seconds.", "result": True}
                    )
                    self.assertDictEqual(saltmod.wait_for_event(name, ""), ret)
                    ret.update({"comment": "Timeout value reached.", "result": False})
                    self.assertDictEqual(
                        saltmod.wait_for_event(name, tgt, timeout=-1.0), ret
                    )
    # 'wait_for_event' function tests: 2
    def test_wait_for_event_list_single_event(self):
        """
        Test to watch Salt's event bus and block until a condition is met
        """
        name = "presence"
        event_id = "lost"
        tgt = ["minion_1", "minion_2", "minion_3"]
        comt = "Timeout value reached."
        ret = {"name": name, "changes": {}, "result": False, "comment": comt}
        class Mockevent:
            """
            Mock event class
            """
            flag = None
            def __init__(self):
                self.full = None
            def get_event(self, full):
                """
                Mock get_event method
                """
                self.full = full
                if self.flag:
                    # Single event that reports all targeted minions at once.
                    return {"tag": name, "data": {"lost": tgt}}
                return None
            def __enter__(self):
                return self
            def __exit__(self, *args):
                pass
        with patch.object(
            salt.utils.event, "get_event", MagicMock(return_value=Mockevent())
        ):
            with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}):
                with patch(
                    "salt.states.saltmod.time.time", MagicMock(return_value=1.0)
                ):
                    ret.update({"comment": "Timeout value reached.", "result": False})
                    self.assertDictEqual(
                        saltmod.wait_for_event(
                            name, tgt, event_id=event_id, timeout=-1.0
                        ),
                        ret,
                    )
                    Mockevent.flag = True
                    ret.update(
                        {
                            "name": name,
                            "changes": {"minions_seen": tgt},
                            "result": True,
                            "comment": "All events seen in 0.0 seconds.",
                        }
                    )
                    # deepcopy(tgt): wait_for_event mutates its id_list argument.
                    self.assertDictEqual(
                        saltmod.wait_for_event(
                            name, copy.deepcopy(tgt), event_id="lost", timeout=1.0
                        ),
                        ret,
                    )
    # 'runner' function tests: 1
    def test_runner(self):
        """
        Test to execute a runner module on the master
        """
        name = "state"
        ret = {
            "changes": {"return": True},
            "name": "state",
            "result": True,
            "comment": "Runner function 'state' executed.",
        }
        runner_mock = MagicMock(return_value={"return": True})
        with patch.dict(saltmod.__salt__, {"saltutil.runner": runner_mock}):
            self.assertDictEqual(saltmod.runner(name), ret)
    # 'wheel' function tests: 1
    def test_wheel(self):
        """
        Test to execute a wheel module on the master
        """
        name = "state"
        ret = {
            "changes": {"return": True},
            "name": "state",
            "result": True,
            "comment": "Wheel function 'state' executed.",
        }
        wheel_mock = MagicMock(return_value={"return": True})
        with patch.dict(saltmod.__salt__, {"saltutil.wheel": wheel_mock}):
            self.assertDictEqual(saltmod.wheel(name), ret)
    @pytest.mark.slow_test
    def test_state_ssh(self):
        """
        Test saltmod state passes roster to saltutil.cmd
        """
        origcmd = saltmod.__salt__["saltutil.cmd"]
        cmd_kwargs = {}
        cmd_args = []
        # Pass-through wrapper: records args/kwargs, then delegates to the
        # original saltutil.cmd so the state still runs.
        def cmd_mock(*args, **kwargs):
            cmd_args.extend(args)
            cmd_kwargs.update(kwargs)
            return origcmd(*args, **kwargs)
        with patch.dict(saltmod.__salt__, {"saltutil.cmd": cmd_mock}):
            ret = saltmod.state(
                "state.sls", tgt="*", ssh=True, highstate=True, roster="my_roster"
            )
        assert "roster" in cmd_kwargs
        assert cmd_kwargs["roster"] == "my_roster"
    @pytest.mark.slow_test
    def test_function_ssh(self):
        """
        Test saltmod function passes roster to saltutil.cmd
        """
        origcmd = saltmod.__salt__["saltutil.cmd"]
        cmd_kwargs = {}
        cmd_args = []
        # Pass-through wrapper: records args/kwargs, then delegates to the
        # original saltutil.cmd so the function still runs.
        def cmd_mock(*args, **kwargs):
            cmd_args.extend(args)
            cmd_kwargs.update(kwargs)
            return origcmd(*args, **kwargs)
        with patch.dict(saltmod.__opts__, {"test": False}), patch.dict(
            saltmod.__salt__, {"saltutil.cmd": cmd_mock}
        ):
            saltmod.function("state", tgt="*", ssh=True, roster="my_roster")
        assert "roster" in cmd_kwargs
        assert cmd_kwargs["roster"] == "my_roster"
class StatemodTests(TestCase, LoaderModuleMockMixin):
    """
    Legacy unittest-style smoke tests for salt.states.saltmod.state() with a
    fully-populated, master-role __opts__ and a mocked saltutil.cmd.
    """
    def setup_loader_modules(self):
        # Per-test temp cachedir so extension_modules resolves to a real path.
        self.tmp_cachedir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        return {
            saltmod: {
                "__env__": "base",
                "__opts__": {
                    "id": "webserver2",
                    "argv": [],
                    "__role": "master",
                    "cachedir": self.tmp_cachedir,
                    "extension_modules": os.path.join(self.tmp_cachedir, "extmods"),
                },
                "__salt__": {"saltutil.cmd": MagicMock()},
                "__orchestration_jid__": salt.utils.jid.gen_jid({}),
            }
        }
    def test_statemod_state(self):
        """Smoke test for salt.states.statemod.state(). Ensures that we
        don't take an exception if optional parameters are not specified in
        __opts__ or __env__.
        """
        args = ("webserver_setup", "webserver2")
        # Every optional keyword supplied explicitly, mostly as None/defaults.
        kwargs = {
            "tgt_type": "glob",
            "fail_minions": None,
            "pillar": None,
            "top": None,
            "batch": None,
            "orchestration_jid": None,
            "sls": "vroom",
            "queue": False,
            "concurrent": False,
            "highstate": None,
            "expr_form": None,
            "ret": "",
            "ssh": False,
            "timeout": None,
            "test": False,
            "allow_fail": 0,
            "saltenv": None,
            "expect_minions": False,
        }
        ret = saltmod.state(*args, **kwargs)
        expected = {
            "comment": "States ran successfully.",
            "changes": {},
            "name": "webserver_setup",
            "result": True,
        }
        self.assertEqual(ret, expected)

View file

@ -1,393 +0,0 @@
"""
Test the verification routines
"""
import ctypes
import getpass
import os
import shutil
import socket
import stat
import sys
import tempfile
import pytest
import salt.utils.files
import salt.utils.platform
from salt.utils.verify import (
check_max_open_files,
check_user,
clean_path,
log,
valid_id,
verify_env,
verify_log,
verify_log_files,
verify_logs_filter,
verify_socket,
zmq_version,
)
from tests.support.helpers import TstSuiteLoggingHandler
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase
if sys.platform.startswith("win"):
import win32file
else:
import resource
class TestVerify(TestCase):
"""
Verify module tests
"""
def test_valid_id_exception_handler(self):
"""
Ensure we just return False if we pass in invalid or undefined paths.
Refs #8259
"""
opts = {"pki_dir": "/tmp/whatever"}
self.assertFalse(valid_id(opts, None))
def test_valid_id_pathsep(self):
"""
Path separators in id should make it invalid
"""
opts = {"pki_dir": "/tmp/whatever"}
# We have to test both path separators because os.path.normpath will
# convert forward slashes to backslashes on Windows.
for pathsep in ("/", "\\"):
self.assertFalse(valid_id(opts, pathsep.join(("..", "foobar"))))
def test_zmq_verify(self):
self.assertTrue(zmq_version())
def test_zmq_verify_insufficient(self):
import zmq
with patch.object(zmq, "__version__", "2.1.0"):
self.assertFalse(zmq_version())
def test_user(self):
self.assertTrue(check_user(getpass.getuser()))
def test_no_user(self):
# Catch sys.stderr here since no logging is configured and
# check_user WILL write to sys.stderr
class FakeWriter:
def __init__(self):
self.output = ""
self.errors = "strict"
def write(self, data):
self.output += data
def flush(self):
pass
stderr = sys.stderr
writer = FakeWriter()
sys.stderr = writer
try:
# Now run the test
if sys.platform.startswith("win"):
self.assertTrue(check_user("nouser"))
else:
with self.assertRaises(SystemExit):
self.assertFalse(check_user("nouser"))
finally:
# Restore sys.stderr
sys.stderr = stderr
if writer.output != 'CRITICAL: User not found: "nouser"\n':
# If there's a different error catch, write it to sys.stderr
sys.stderr.write(writer.output)
@pytest.mark.skip_on_windows(reason="No verify_env Windows")
def test_verify_env(self):
root_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
var_dir = os.path.join(root_dir, "var", "log", "salt")
key_dir = os.path.join(root_dir, "key_dir")
verify_env([var_dir], getpass.getuser(), root_dir=root_dir)
self.assertTrue(os.path.exists(var_dir))
dir_stat = os.stat(var_dir)
self.assertEqual(dir_stat.st_uid, os.getuid())
self.assertEqual(dir_stat.st_mode & stat.S_IRWXU, stat.S_IRWXU)
self.assertEqual(dir_stat.st_mode & stat.S_IRWXG, 40)
self.assertEqual(dir_stat.st_mode & stat.S_IRWXO, 5)
@pytest.mark.requires_network(only_local_network=True)
def test_verify_socket(self):
self.assertTrue(verify_socket("", 18000, 18001))
if socket.has_ipv6:
# Only run if Python is built with IPv6 support; otherwise
# this will just fail.
try:
self.assertTrue(verify_socket("::", 18000, 18001))
except OSError:
# Python has IPv6 enabled, but the system cannot create
# IPv6 sockets (otherwise the test would return a bool)
# - skip the test
#
# FIXME - possibly emit a message that the system does
# not support IPv6.
pass
    def test_max_open_files(self):
        """
        Walk the accepted-minion-key count across the 1/4-of-soft-limit
        thresholds and assert check_max_open_files logs the expected
        DEBUG/INFO/WARNING/CRITICAL messages at each level, then hits the
        soft limit itself and expects the "crash pretty soon" message.
        """
        with TstSuiteLoggingHandler() as handler:
            logmsg_dbg = "DEBUG:This salt-master instance has accepted {0} minion keys."
            logmsg_chk = (
                "{0}:The number of accepted minion keys({1}) should be lower "
                "than 1/4 of the max open files soft setting({2}). According "
                "to the system's hard limit, there's still a margin of {3} "
                "to raise the salt's max_open_files setting. Please consider "
                "raising this value."
            )
            logmsg_crash = (
                "{0}:The number of accepted minion keys({1}) should be lower "
                "than 1/4 of the max open files soft setting({2}). "
                "salt-master will crash pretty soon! According to the "
                "system's hard limit, there's still a margin of {3} to "
                "raise the salt's max_open_files setting. Please consider "
                "raising this value."
            )
            if sys.platform.startswith("win"):
                # On Windows the expected message omits the hard-limit
                # margin sentence.
                logmsg_crash = (
                    "{0}:The number of accepted minion keys({1}) should be lower "
                    "than 1/4 of the max open files soft setting({2}). "
                    "salt-master will crash pretty soon! Please consider "
                    "raising this value."
                )
            if sys.platform.startswith("win"):
                # Check the Windows API for more detail on this
                # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
                # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
                mof_s = mof_h = win32file._getmaxstdio()
            else:
                mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
            tempdir = tempfile.mkdtemp(prefix="fake-keys")
            keys_dir = os.path.join(tempdir, "minions")
            os.makedirs(keys_dir)
            # Lower the soft limit to a known value so the thresholds below
            # (1/4 of 256 == 64, etc.) are deterministic.
            mof_test = 256
            if sys.platform.startswith("win"):
                win32file._setmaxstdio(mof_test)
            else:
                resource.setrlimit(resource.RLIMIT_NOFILE, (mof_test, mof_h))
            try:
                prev = 0
                # Each (newmax, level) pair crosses one severity threshold;
                # the first count stays below 1/4 of the soft limit so only
                # the DEBUG message is expected.
                for newmax, level in (
                    (24, None),
                    (66, "INFO"),
                    (127, "WARNING"),
                    (196, "CRITICAL"),
                ):
                    # Create fake accepted-minion-key files up to the count.
                    for n in range(prev, newmax):
                        kpath = os.path.join(keys_dir, str(n))
                        with salt.utils.files.fopen(kpath, "w") as fp_:
                            fp_.write(str(n))
                    opts = {"max_open_files": newmax, "pki_dir": tempdir}
                    check_max_open_files(opts)
                    if level is None:
                        # No log message is triggered, only the DEBUG one which
                        # tells us how many minion keys were accepted.
                        self.assertEqual([logmsg_dbg.format(newmax)], handler.messages)
                    else:
                        self.assertIn(logmsg_dbg.format(newmax), handler.messages)
                        self.assertIn(
                            logmsg_chk.format(
                                level,
                                newmax,
                                mof_test,
                                mof_test - newmax
                                if sys.platform.startswith("win")
                                else mof_h - newmax,
                            ),
                            handler.messages,
                        )
                    handler.clear()
                    prev = newmax
                # Finally push the key count all the way to the soft limit
                # and expect the CRITICAL "crash pretty soon" message.
                newmax = mof_test
                for n in range(prev, newmax):
                    kpath = os.path.join(keys_dir, str(n))
                    with salt.utils.files.fopen(kpath, "w") as fp_:
                        fp_.write(str(n))
                opts = {"max_open_files": newmax, "pki_dir": tempdir}
                check_max_open_files(opts)
                self.assertIn(logmsg_dbg.format(newmax), handler.messages)
                self.assertIn(
                    logmsg_crash.format(
                        "CRITICAL",
                        newmax,
                        mof_test,
                        mof_test - newmax
                        if sys.platform.startswith("win")
                        else mof_h - newmax,
                    ),
                    handler.messages,
                )
                handler.clear()
            except OSError as err:
                if err.errno == 24:
                    # Too many open files
                    self.skipTest("We've hit the max open files setting")
                raise
            finally:
                # Always restore the original descriptor limits and remove
                # the fake key directory, even when an assertion fails.
                if sys.platform.startswith("win"):
                    win32file._setmaxstdio(mof_h)
                else:
                    resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h))
                shutil.rmtree(tempdir)
def test_verify_log(self):
"""
Test that verify_log works as expected
"""
message = (
"Insecure logging configuration detected! Sensitive data may be logged."
)
mock_cheese = MagicMock()
with patch.object(log, "warning", mock_cheese):
verify_log({"log_level": "cheeseshop"})
mock_cheese.assert_called_once_with(message)
mock_trace = MagicMock()
with patch.object(log, "warning", mock_trace):
verify_log({"log_level": "trace"})
mock_trace.assert_called_once_with(message)
mock_none = MagicMock()
with patch.object(log, "warning", mock_none):
verify_log({})
mock_none.assert_called_once_with(message)
mock_info = MagicMock()
with patch.object(log, "warning", mock_info):
verify_log({"log_level": "info"})
self.assertTrue(mock_info.call_count == 0)
class TestVerifyLog(TestCase):
    """
    Validate verify_logs_filter and verify_log_files from salt.utils.verify.
    """

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def test_verify_logs_filter(self):
        # Only plain filesystem paths survive the filter; URL-style log
        # targets (udp/tcp/file schemes) must be dropped.
        filtered = verify_logs_filter(
            ["udp://foo", "tcp://bar", "/tmp/foo", "file://tmp/bar"]
        )
        assert filtered == ["/tmp/foo"], filtered

    @pytest.mark.skip_on_windows(reason="Not applicable on Windows")
    def test_verify_log_files_udp_scheme(self):
        # A udp:// target must not be mistaken for a relative path.
        verify_log_files(["udp://foo"], getpass.getuser())
        self.assertFalse(os.path.isdir(os.path.join(os.getcwd(), "udp:")))

    @pytest.mark.skip_on_windows(reason="Not applicable on Windows")
    def test_verify_log_files_tcp_scheme(self):
        # Bug fix: this test previously passed "udp://foo", so the tcp://
        # scheme was never actually exercised.
        verify_log_files(["tcp://foo"], getpass.getuser())
        self.assertFalse(os.path.isdir(os.path.join(os.getcwd(), "tcp:")))

    @pytest.mark.skip_on_windows(reason="Not applicable on Windows")
    def test_verify_log_files_file_scheme(self):
        # NOTE(review): the literal "{}" looks like an unformatted format
        # placeholder — confirm the intended target against history.
        verify_log_files(["file://{}"], getpass.getuser())
        self.assertFalse(os.path.isdir(os.path.join(os.getcwd(), "file:")))

    @pytest.mark.skip_on_windows(reason="Not applicable on Windows")
    def test_verify_log_files(self):
        # Missing parent directories must be created along with the file.
        path = os.path.join(self.tmpdir, "foo", "bar.log")
        self.assertFalse(os.path.exists(path))
        verify_log_files([path], getpass.getuser())
        self.assertTrue(os.path.exists(path))
class TestCleanPath(TestCase):
    """
    salt.utils.clean_path works as expected
    """

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def test_clean_path_valid(self):
        # A path located under the root comes back unchanged.
        root = os.path.join(self.tmpdir, "foo")
        candidate = os.path.join(root, "bar")
        assert clean_path(root, candidate) == candidate

    def test_clean_path_invalid(self):
        # A path outside the root is rejected with an empty string.
        root = os.path.join(self.tmpdir, "foo")
        candidate = os.path.join(self.tmpdir, "baz", "bar")
        assert clean_path(root, candidate) == ""
# Cached ctypes prototype for kernel32.CreateSymbolicLinkW; populated lazily
# on first call to symlink() (Windows only).
__CSL = None


def symlink(source, link_name):
    """
    symlink(source, link_name) Creates a symbolic link pointing to source named
    link_name

    Windows replacement for os.symlink built on the Win32
    CreateSymbolicLinkW API via ctypes; raises WinError on failure.
    """
    global __CSL
    if __CSL is None:
        # Build and cache the function prototype once.
        csl = ctypes.windll.kernel32.CreateSymbolicLinkW
        csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
        csl.restype = ctypes.c_ubyte
        __CSL = csl
    flags = 0
    # Flag 1 (SYMBOLIC_LINK_FLAG_DIRECTORY) is required for directory targets.
    if source is not None and os.path.isdir(source):
        flags = 1
    if __CSL(link_name, source, flags) == 0:
        raise ctypes.WinError()
class TestCleanPathLink(TestCase):
    """
    Ensure salt.utils.clean_path works with symlinked directories and files
    """

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.to_path = os.path.join(self.tmpdir, "linkto")
        self.from_path = os.path.join(self.tmpdir, "linkfrom")
        # The previous implementation checked is_windows() twice — once to
        # build kwargs, once to pick the symlink function. One branch is
        # enough: the ctypes-based helper takes no extra keyword arguments.
        if salt.utils.platform.is_windows():
            symlink(self.to_path, self.from_path)
        else:
            os.symlink(self.to_path, self.from_path, target_is_directory=True)

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def test_clean_path_symlinked_src(self):
        # A candidate given through the symlink resolves to the real target.
        test_path = os.path.join(self.from_path, "test")
        expect_path = os.path.join(self.to_path, "test")
        ret = clean_path(self.from_path, test_path)
        assert ret == expect_path, "{} is not {}".format(ret, expect_path)

    def test_clean_path_symlinked_tgt(self):
        # A candidate already under the real target is returned unchanged.
        test_path = os.path.join(self.to_path, "test")
        expect_path = os.path.join(self.to_path, "test")
        ret = clean_path(self.from_path, test_path)
        assert ret == expect_path, "{} is not {}".format(ret, expect_path)

View file

@ -9,6 +9,7 @@ import logging
import os
import pathlib
import random
import shutil
import sys
import time
from typing import TYPE_CHECKING, Any
@ -629,10 +630,10 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False):
"""
_matrix = []
_splits = {
"functional": 5,
"integration": 7,
"scenarios": 2,
"unit": 4,
"functional": 3,
"integration": 5,
"scenarios": 1,
"unit": 2,
}
for transport in ("zeromq", "tcp"):
if transport == "tcp":
@ -718,8 +719,11 @@ def pkg_matrix(
"ubuntu-20.04-arm64",
"ubuntu-22.04-arm64",
"photonos-3",
"photonos-3-arm64",
"photonos-4",
"photonos-4-arm64",
"photonos-5",
"photonos-5-arm64",
]
and pkg_type != "MSI"
):
@ -756,8 +760,11 @@ def pkg_matrix(
"ubuntu-22.04",
"ubuntu-22.04-arm64",
"photonos-3",
"photonos-3-arm64",
"photonos-4",
"photonos-4-arm64",
"photonos-5",
"photonos-5-arm64",
]
and pkg_type != "MSI"
):
@ -776,6 +783,13 @@ def pkg_matrix(
if version < tools.utils.Version("3006.0")
]
for version in versions:
if (
version
and distro_slug.startswith("photonos-5")
and version < tools.utils.Version("3007.0")
):
# We never build packages for Photon OS 5 prior to 3007.0
continue
_matrix.append(
{
"test-chunk": session,
@ -1105,3 +1119,89 @@ def define_cache_seed(ctx: Context, static_cache_seed: str, randomize: bool = Fa
ctx.info("Writing 'cache-seed' to the github outputs file")
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"cache-seed={cache_seed}\n")
@ci.command(
    name="upload-coverage",
    arguments={
        "commit_sha": {
            "help": "The commit SHA",
            "required": True,
        },
        "reports_path": {
            "help": "The path to the directory containing the XML Coverage Reports",
        },
    },
)
def upload_coverage(ctx: Context, reports_path: pathlib.Path, commit_sha: str = None):
    """
    Upload code coverage to codecov.

    Each ``*.xml`` report found directly under ``reports_path`` is uploaded
    individually (up to three attempts per file), deriving the codecov flags
    from the report file name.
    """
    codecov = shutil.which("codecov")
    if not codecov:
        ctx.error("Could not find the path to the 'codecov' binary")
        ctx.exit(1)

    codecov_args = [
        codecov,
        "--nonZero",
        "--sha",
        commit_sha,
    ]

    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
    if gh_event_path is not None:
        try:
            # Use a context manager so the event-file handle is closed;
            # the previous ``open(...).read()`` leaked the descriptor.
            with open(gh_event_path, encoding="utf-8") as rfh:
                gh_event = json.load(rfh)
            pr_event_data = gh_event.get("pull_request")
            if pr_event_data:
                # For PR builds, point codecov at the merge base commit.
                codecov_args.extend(["--parent", pr_event_data["base"]["sha"]])
        except Exception as exc:
            ctx.error(
                f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc
            )

    sleep_time = 15
    for fpath in reports_path.glob("*.xml"):
        if fpath.name in ("salt.xml", "tests.xml"):
            # Aggregate reports use their bare stem as the flag.
            flags = fpath.stem
        else:
            # Per-run reports encode "<section>..<distro>..<session>".
            try:
                section, distro_slug, nox_session = fpath.stem.split("..")
            except ValueError:
                ctx.error(
                    f"The file {fpath} does not respect the expected naming convention "
                    "'{salt|tests}..<distro-slug>..<nox-session>.xml'. Skipping..."
                )
                continue
            flags = f"{section},{distro_slug}"

        max_attempts = 3
        current_attempt = 0
        while True:
            current_attempt += 1
            ctx.info(
                f"Uploading '{fpath}' coverage report to codecov (attempt {current_attempt} of {max_attempts}) ..."
            )
            ret = ctx.run(
                *codecov_args,
                "--file",
                str(fpath),
                "--name",
                fpath.stem,
                "--flags",
                flags,
                check=False,
            )
            if ret.returncode == 0:
                break
            if current_attempt >= max_attempts:
                ctx.error(f"Failed to upload {fpath} to codecov")
                ctx.exit(1)
            ctx.warn(f"Waiting {sleep_time} seconds until next retry...")
            time.sleep(sleep_time)
    ctx.exit(0)

View file

@ -323,7 +323,7 @@ _rpm_distro_info = {
"amazon": ["2"],
"redhat": ["7", "8", "9"],
"fedora": ["36", "37", "38"],
"photon": ["3", "4"],
"photon": ["3", "4", "5"],
}

View file

@ -102,8 +102,11 @@ def generate_workflows(ctx: Context):
("fedora-38", "Fedora 38", "x86_64"),
("opensuse-15", "Opensuse 15", "x86_64"),
("photonos-3", "Photon OS 3", "x86_64"),
("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64"),
("photonos-4", "Photon OS 4", "x86_64"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64"),
("photonos-5", "Photon OS 5", "x86_64"),
("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64"),
@ -128,8 +131,11 @@ def generate_workflows(ctx: Context):
("debian-12", "Debian 12", "x86_64", "deb"),
("debian-12-arm64", "Debian 12 Arm64", "aarch64", "deb"),
("photonos-3", "Photon OS 3", "x86_64", "rpm"),
("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm"),
("photonos-4", "Photon OS 4", "x86_64", "rpm"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm"),
("photonos-5", "Photon OS 5", "x86_64", "rpm"),
("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "rpm"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb"),
@ -168,8 +174,11 @@ def generate_workflows(ctx: Context):
("fedora-38-arm64", "Fedora 38 Arm64", "aarch64"),
("opensuse-15", "Opensuse 15", "x86_64"),
("photonos-3", "Photon OS 3", "x86_64"),
("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64"),
("photonos-4", "Photon OS 4", "x86_64"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64"),
("photonos-5", "Photon OS 5", "x86_64"),
("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64"),

View file

@ -6,6 +6,7 @@ from __future__ import annotations
import json
import logging
import pathlib
from typing import TYPE_CHECKING
from ptscripts import Context, command_group
@ -147,3 +148,75 @@ def download_pkgs_artifact(
ctx=ctx, run_id=run_id, slug=slug, repository=repository
)
ctx.exit(exitcode)
@download.command(
    name="artifact",
    arguments={
        "artifact_name": {
            "help": "The name of the artifact to download",
        },
        "dest": {
            "help": "The path to the file downloaded",
        },
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
        },
        "branch": {
            "help": "The branch from where to look for artifacts.",
            "metavar": "BRANCH_NAME",
        },
        "pr": {
            "help": "The pull-request from where to look for artifacts.",
            "metavar": "PR_NUMBER",
        },
        "nightly": {
            "help": "The nightly build branch from where to look for artifacts.",
            "metavar": "BRANCH_NAME",
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def download_artifact(
    ctx: Context,
    artifact_name: pathlib.Path,
    dest: pathlib.Path,
    run_id: int = None,
    branch: str = None,
    nightly: str = None,
    pr: int = None,
    repository: str = "saltstack/salt",
):
    """
    Download CI built packages artifacts.

    If ``run_id`` is not given, the workflow run is discovered from the
    ``branch`` / ``nightly`` / ``pr`` selectors via
    ``tools.utils.gh.discover_run_id``. Exits 0 on success, 1 on failure.
    """
    if TYPE_CHECKING:
        assert artifact_name is not None
        assert dest is not None
    if run_id is not None:
        actual_run_id = run_id
    else:
        # Resolve the workflow run from the given selectors.
        potential_run_id = tools.utils.gh.discover_run_id(
            ctx, branch=branch, nightly=nightly, pr=pr, repository=repository
        )
        if potential_run_id is not None:
            actual_run_id = potential_run_id
        else:
            ctx.exit(1, "Could not discover run ID")
    succeeded = tools.utils.gh.download_artifact(
        ctx,
        dest,
        actual_run_id,
        repository=repository,
        artifact_name=str(artifact_name),
    )
    # NOTE(review): logs the raw helper return value; presumably truthy on
    # success — confirm against tools.utils.gh.download_artifact.
    ctx.info(succeeded)
    if succeeded:
        ctx.info(f"Downloaded {artifact_name} to {dest}")
        ctx.exit(0)
    else:
        ctx.exit(1)

View file

@ -138,7 +138,9 @@ def download_nox_artifact(
if nox is None:
ctx.error("Could not find the 'nox' binary in $PATH")
return ExitCode.FAIL
ret = ctx.run(nox, "-e", "decompress-dependencies", "--", slug, check=False)
ret = ctx.run(
nox, "--force-color", "-e", "decompress-dependencies", "--", slug, check=False
)
if ret.returncode:
ctx.error("Failed to decompress the nox dependencies")
return ExitCode.FAIL