Merge 3006.x into master

This commit is contained in:
Pedro Algarvio 2023-10-07 08:43:20 +01:00
commit 144b1314d8
No known key found for this signature in database
GPG key ID: BB36BF6584A298FF
36 changed files with 1636 additions and 822 deletions

View file

@ -1,112 +1,72 @@
codecov:
ci:
- jenkins.saltproject.io
- github.com
max_report_age: 24 # The age you want coverage reports to expire at, or if you
# want to disable this check. Expired reports will not be processed by codecov.
require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
max_report_age: 72 # The age, in hours, you want coverage reports to expire at, or if you
# want to disable this check. Expired reports will not be processed by codecov.
require_ci_to_pass: false
# notify:
# after_n_builds: 25 # Only notify after N builds
# # This value is the output of:
# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
# wait_for_ci: yes # Should Codecov wait for all CI statuses to complete before sending ours.
# # Note: Codecov considers all non-codecov statuses to be CI statuses
notify:
wait_for_ci: true # Should Codecov wait for all CI statuses to complete before sending ours.
# Note: Codecov considers all non-codecov statuses to be CI statuses
# manual_trigger: true # We manually tell codecov to merge and process all uploaded coverage reports
ignore:
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
- salt/ext/.* # ignore any code under salt/ext
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
- salt/ext/.* # ignore any code under salt/ext
coverage:
round: up
range: 70..100
precision: 2
notify: {}
status: false
status:
project:
default:
target: auto # auto compares coverage to the previous base commit
threshold: 5% # adjust accordingly based on how flaky your tests are
# this allows a 5% drop from the previous base commit coverage
flags:
- salt
- tests
# status:
# project: # measuring the overall project coverage
# default:
# informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# # resulting status will pass no matter what the coverage is or what other settings
# # are specified. Informational mode is great to use if you want to expose codecov
# # information to other developers in your pull request without necessarily gating
# # PRs on that information.
# target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
# flags:
# - salt
# - tests
# salt: # declare a new status context "salt"
# informational: true
# paths: "!tests/" # remove all files in "tests/"
# target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# # options: success, error, failure
# if_not_found: success # if parent is not found report status as success, error, or failure
# if_ci_failed: error # if ci fails report status as success, error, or failure
# flags:
# - salt
# tests: # declare a new status context "tests"
# informational: true
# #target: 100% # we always want 100% coverage here
# target: auto # auto while we get this going
# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
# paths: "!salt/" # only include coverage in "tests/" folder
# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# # options: success, error, failure
# if_not_found: success # if parent is not found report status as success, error, or failure
# if_ci_failed: error # if ci fails report status as success, error, or failure
# flags:
# - tests
salt: # declare a new status context "salt"
paths:
- "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
flags:
- salt
# patch: # pull requests only: this commit status will measure the
# # entire pull request's Coverage Diff. Checking if the lines
# # adjusted are covered at least X%.
# default:
# informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# # resulting status will pass no matter what the coverage is or what other settings
# # are specified. Informational mode is great to use if you want to expose codecov
# # information to other developers in your pull request without necessarily gating
# # PRs on that information.
# target: 100% # Newly added lines must have 100% coverage
# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# # options: success, error, failure
# if_not_found: success
# if_ci_failed: error
# flags:
# - salt
# - tests
tests: # declare a new status context "tests"
paths:
- "!salt/" # only include coverage in "tests/" folder
target: auto # auto while we get this going
flags:
- tests
# changes: # if there are any unexpected changes in coverage
# default:
# informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# # resulting status will pass no matter what the coverage is or what other settings
# # are specified. Informational mode is great to use if you want to expose codecov
# # information to other developers in your pull request without necessarily gating
# # PRs on that information.
# if_no_uploads: error
# if_not_found: success
# if_ci_failed: error
# flags:
# - salt
# - tests
patch: # pull requests only: this commit status will measure the
# entire pull request's Coverage Diff. Checking if the lines
# adjusted are covered at least X%.
default:
target: auto # auto compares coverage to the previous base commit
threshold: 5% # adjust accordingly based on how flaky your tests are
# this allows a 5% drop from the previous base commit coverage
flags:
salt:
paths:
- salt/
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
tests:
paths:
- tests/
carryforward: true
pkg:
paths:
- pkg/tests
carryforward: true
unit:
paths:
- tests/unit
@ -125,23 +85,10 @@ flags:
- tests/integration
- tests/pytests/integration
carryforward: true
system:
paths:
- tests/integration
- tests/pytests/integration
carryforward: true
#comment:
# layout: "reach, diff, flags, files"
# after_n_builds: 46 # Only comment on PRs after N builds
# # This value is the output of:
# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
#
# behavior: new # Comment posting behaviour
# # default: update, if exists. Otherwise post new.
# # once: update, if exists. Otherwise post new. Skip if deleted.
# # new: delete old and post new.
# # spammy: post new (do not delete old comments).
#
# Disable Comments
comment: off
comment:
layout: "reach, diff, flags, files"
behavior: default # Comment posting behaviour
# default: update, if exists. Otherwise post new.
# once: update, if exists. Otherwise post new. Skip if deleted.
# new: delete old and post new.

View file

@ -48,6 +48,7 @@ env:
COLUMNS: 190
PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/"
PIP_EXTRA_INDEX_URL: "https://pypi.org/simple"
GITHUB_ACTIONS_PIPELINE: "1"
jobs:

View file

@ -244,6 +244,8 @@ jobs:
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1191,7 +1193,7 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
amazonlinux-2-pkg-tests:
name: Amazon Linux 2
name: Amazon Linux 2 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1213,7 +1215,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7
name: CentOS 7 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1235,7 +1237,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-8-pkg-tests:
name: CentOS Stream 8
name: CentOS Stream 8 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1257,7 +1259,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-9-pkg-tests:
name: CentOS Stream 9
name: CentOS Stream 9 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1279,7 +1281,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-10-pkg-tests:
name: Debian 10
name: Debian 10 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1301,7 +1303,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11
name: Debian 11 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1323,7 +1325,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-arm64-pkg-tests:
name: Debian 11 Arm64
name: Debian 11 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1345,7 +1347,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-pkg-tests:
name: Photon OS 3
name: Photon OS 3 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1367,7 +1369,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4
name: Photon OS 4 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1389,7 +1391,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-arm64-pkg-tests:
name: Photon OS 4 Arm64
name: Photon OS 4 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1411,7 +1413,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04
name: Ubuntu 20.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1433,7 +1435,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-arm64-pkg-tests:
name: Ubuntu 20.04 Arm64
name: Ubuntu 20.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1455,7 +1457,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-pkg-tests:
name: Ubuntu 22.04
name: Ubuntu 22.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1477,7 +1479,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-arm64-pkg-tests:
name: Ubuntu 22.04 Arm64
name: Ubuntu 22.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1499,7 +1501,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12
name: macOS 12 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
@ -1521,7 +1523,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-nsis-pkg-tests:
name: Windows 2016
name: Windows 2016 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1543,7 +1545,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-msi-pkg-tests:
name: Windows 2016
name: Windows 2016 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1565,7 +1567,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-nsis-pkg-tests:
name: Windows 2019
name: Windows 2019 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1587,7 +1589,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-msi-pkg-tests:
name: Windows 2019
name: Windows 2019 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1609,7 +1611,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-nsis-pkg-tests:
name: Windows 2022
name: Windows 2022 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1631,7 +1633,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-msi-pkg-tests:
name: Windows 2022
name: Windows 2022 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -2207,6 +2209,8 @@ jobs:
run: |
python3 -m pip install 'nox==2022.8.7'
- name: Get coverage reports
id: get-coverage-reports
uses: actions/download-artifact@v3
@ -2221,6 +2225,14 @@ jobs:
run: |
nox --force-color -e combine-coverage
- name: Report Salt Code Coverage
run: |
nox --force-color -e coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
@ -2233,11 +2245,27 @@ jobs:
retention-days: 7
if-no-files-found: error
- name: Create Full Code Coverage HTML Report
- name: Report Combined Code Coverage
run: |
nox --force-color -e coverage-report
- name: Create Combined Code Coverage JSON Report
run: |
nox --force-color -e create-json-coverage-reports
- name: Upload Combined Code Coverage JSON Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-json-report
path: artifacts/coverage/coverage.json
retention-days: 7
if-no-files-found: error
- name: Create Combined Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report
- name: Upload Full Code Coverage HTML Report
- name: Upload Combined Code Coverage HTML Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-html-report

View file

@ -288,6 +288,8 @@ jobs:
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1247,7 +1249,7 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
amazonlinux-2-pkg-tests:
name: Amazon Linux 2
name: Amazon Linux 2 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1269,7 +1271,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7
name: CentOS 7 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1291,7 +1293,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-8-pkg-tests:
name: CentOS Stream 8
name: CentOS Stream 8 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1313,7 +1315,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-9-pkg-tests:
name: CentOS Stream 9
name: CentOS Stream 9 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1335,7 +1337,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-10-pkg-tests:
name: Debian 10
name: Debian 10 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1357,7 +1359,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11
name: Debian 11 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1379,7 +1381,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-arm64-pkg-tests:
name: Debian 11 Arm64
name: Debian 11 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1401,7 +1403,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-pkg-tests:
name: Photon OS 3
name: Photon OS 3 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1423,7 +1425,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4
name: Photon OS 4 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1445,7 +1447,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-arm64-pkg-tests:
name: Photon OS 4 Arm64
name: Photon OS 4 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1467,7 +1469,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04
name: Ubuntu 20.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1489,7 +1491,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-arm64-pkg-tests:
name: Ubuntu 20.04 Arm64
name: Ubuntu 20.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1511,7 +1513,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-pkg-tests:
name: Ubuntu 22.04
name: Ubuntu 22.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1533,7 +1535,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-arm64-pkg-tests:
name: Ubuntu 22.04 Arm64
name: Ubuntu 22.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1555,7 +1557,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12
name: macOS 12 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
@ -1577,7 +1579,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-nsis-pkg-tests:
name: Windows 2016
name: Windows 2016 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1599,7 +1601,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-msi-pkg-tests:
name: Windows 2016
name: Windows 2016 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1621,7 +1623,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-nsis-pkg-tests:
name: Windows 2019
name: Windows 2019 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1643,7 +1645,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-msi-pkg-tests:
name: Windows 2019
name: Windows 2019 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1665,7 +1667,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-nsis-pkg-tests:
name: Windows 2022
name: Windows 2022 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1687,7 +1689,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-msi-pkg-tests:
name: Windows 2022
name: Windows 2022 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -2263,6 +2265,8 @@ jobs:
run: |
python3 -m pip install 'nox==2022.8.7'
- name: Get coverage reports
id: get-coverage-reports
uses: actions/download-artifact@v3
@ -2277,6 +2281,14 @@ jobs:
run: |
nox --force-color -e combine-coverage
- name: Report Salt Code Coverage
run: |
nox --force-color -e coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
@ -2289,11 +2301,27 @@ jobs:
retention-days: 7
if-no-files-found: error
- name: Create Full Code Coverage HTML Report
- name: Report Combined Code Coverage
run: |
nox --force-color -e coverage-report
- name: Create Combined Code Coverage JSON Report
run: |
nox --force-color -e create-json-coverage-reports
- name: Upload Combined Code Coverage JSON Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-json-report
path: artifacts/coverage/coverage.json
retention-days: 7
if-no-files-found: error
- name: Create Combined Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report
- name: Upload Full Code Coverage HTML Report
- name: Upload Combined Code Coverage HTML Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-html-report
@ -2302,7 +2330,7 @@ jobs:
if-no-files-found: error
build-src-repo:
name: Build Source Repository
name: Build Repository
environment: nightly
runs-on:
- self-hosted
@ -2310,6 +2338,13 @@ jobs:
- repo-nightly
needs:
- prepare-workflow
- build-source-tarball
strategy:
fail-fast: false
matrix:
pkg-type:
- src
steps:
- uses: actions/checkout@v4
@ -2393,7 +2428,7 @@ jobs:
archive-name: src-repo
build-deb-repo:
name: Build DEB Repository
name: Build Repository
environment: nightly
runs-on:
- self-hosted
@ -2406,28 +2441,36 @@ jobs:
fail-fast: false
matrix:
include:
- distro: debian
- pkg-type: deb
distro: debian
version: "10"
arch: x86_64
- distro: debian
- pkg-type: deb
distro: debian
version: "10"
arch: aarch64
- distro: debian
- pkg-type: deb
distro: debian
version: "11"
arch: x86_64
- distro: debian
- pkg-type: deb
distro: debian
version: "11"
arch: aarch64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "20.04"
arch: x86_64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "20.04"
arch: aarch64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "22.04"
arch: x86_64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "22.04"
arch: aarch64
@ -2504,7 +2547,7 @@ jobs:
archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
build-rpm-repo:
name: Build RPM Repository
name: Build Repository
environment: nightly
runs-on:
- self-hosted
@ -2517,58 +2560,76 @@ jobs:
fail-fast: false
matrix:
include:
- distro: amazon
- pkg-type: rpm
distro: amazon
version: "2"
arch: x86_64
- distro: amazon
- pkg-type: rpm
distro: amazon
version: "2"
arch: aarch64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "8"
arch: x86_64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "8"
arch: aarch64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "9"
arch: x86_64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "9"
arch: aarch64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "36"
arch: x86_64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "36"
arch: aarch64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "37"
arch: x86_64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "37"
arch: aarch64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "38"
arch: x86_64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "38"
arch: aarch64
- distro: photon
- pkg-type: rpm
distro: photon
version: "3"
arch: x86_64
- distro: photon
- pkg-type: rpm
distro: photon
version: "3"
arch: aarch64
- distro: photon
- pkg-type: rpm
distro: photon
version: "4"
arch: x86_64
- distro: photon
- pkg-type: rpm
distro: photon
version: "4"
arch: aarch64
@ -2648,7 +2709,7 @@ jobs:
archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
build-windows-repo:
name: Build Windows Repository
name: Build Repository
environment: nightly
runs-on:
- self-hosted
@ -2657,6 +2718,12 @@ jobs:
needs:
- prepare-workflow
- build-windows-pkgs-onedir
strategy:
fail-fast: false
matrix:
pkg-type:
- windows
steps:
- uses: actions/checkout@v4
@ -2742,7 +2809,7 @@ jobs:
archive-name: windows-repo
build-macos-repo:
name: Build macOS Repository
name: Build Repository
environment: nightly
runs-on:
- self-hosted
@ -2751,6 +2818,12 @@ jobs:
needs:
- prepare-workflow
- build-macos-pkgs-onedir
strategy:
fail-fast: false
matrix:
pkg-type:
- macos
steps:
- uses: actions/checkout@v4
@ -2818,7 +2891,7 @@ jobs:
archive-name: macos-repo
build-onedir-repo:
name: Build Onedir Repository
name: Build Repository
environment: nightly
runs-on:
- self-hosted
@ -2826,6 +2899,15 @@ jobs:
- repo-nightly
needs:
- prepare-workflow
- build-salt-onedir-linux
- build-salt-onedir-macos
- build-salt-onedir-windows
strategy:
fail-fast: false
matrix:
pkg-type:
- onedir
steps:
- uses: actions/checkout@v4

View file

@ -224,7 +224,7 @@ jobs:
tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
almalinux-8-package-download-tests:
name: Test Alma Linux 8 package Downloads
name: Alma Linux 8 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -247,7 +247,7 @@ jobs:
secrets: inherit
almalinux-8-arm64-package-download-tests:
name: Test Alma Linux 8 Arm64 package Downloads
name: Alma Linux 8 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -270,7 +270,7 @@ jobs:
secrets: inherit
almalinux-9-package-download-tests:
name: Test Alma Linux 9 package Downloads
name: Alma Linux 9 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -293,7 +293,7 @@ jobs:
secrets: inherit
almalinux-9-arm64-package-download-tests:
name: Test Alma Linux 9 Arm64 package Downloads
name: Alma Linux 9 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -316,7 +316,7 @@ jobs:
secrets: inherit
amazonlinux-2-package-download-tests:
name: Test Amazon Linux 2 package Downloads
name: Amazon Linux 2 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -339,7 +339,7 @@ jobs:
secrets: inherit
amazonlinux-2-arm64-package-download-tests:
name: Test Amazon Linux 2 Arm64 package Downloads
name: Amazon Linux 2 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -362,7 +362,7 @@ jobs:
secrets: inherit
centos-7-package-download-tests:
name: Test CentOS 7 package Downloads
name: CentOS 7 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -385,7 +385,7 @@ jobs:
secrets: inherit
centos-7-arm64-package-download-tests:
name: Test CentOS 7 Arm64 package Downloads
name: CentOS 7 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -408,7 +408,7 @@ jobs:
secrets: inherit
centosstream-8-package-download-tests:
name: Test CentOS Stream 8 package Downloads
name: CentOS Stream 8 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -431,7 +431,7 @@ jobs:
secrets: inherit
centosstream-8-arm64-package-download-tests:
name: Test CentOS Stream 8 Arm64 package Downloads
name: CentOS Stream 8 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -454,7 +454,7 @@ jobs:
secrets: inherit
centosstream-9-package-download-tests:
name: Test CentOS Stream 9 package Downloads
name: CentOS Stream 9 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -477,7 +477,7 @@ jobs:
secrets: inherit
centosstream-9-arm64-package-download-tests:
name: Test CentOS Stream 9 Arm64 package Downloads
name: CentOS Stream 9 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -500,7 +500,7 @@ jobs:
secrets: inherit
debian-10-package-download-tests:
name: Test Debian 10 package Downloads
name: Debian 10 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -523,7 +523,7 @@ jobs:
secrets: inherit
debian-11-package-download-tests:
name: Test Debian 11 package Downloads
name: Debian 11 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -546,7 +546,7 @@ jobs:
secrets: inherit
debian-11-arm64-package-download-tests:
name: Test Debian 11 Arm64 package Downloads
name: Debian 11 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -569,7 +569,7 @@ jobs:
secrets: inherit
fedora-37-package-download-tests:
name: Test Fedora 37 package Downloads
name: Fedora 37 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -592,7 +592,7 @@ jobs:
secrets: inherit
fedora-37-arm64-package-download-tests:
name: Test Fedora 37 Arm64 package Downloads
name: Fedora 37 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -615,7 +615,7 @@ jobs:
secrets: inherit
fedora-38-package-download-tests:
name: Test Fedora 38 package Downloads
name: Fedora 38 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -638,7 +638,7 @@ jobs:
secrets: inherit
fedora-38-arm64-package-download-tests:
name: Test Fedora 38 Arm64 package Downloads
name: Fedora 38 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -661,7 +661,7 @@ jobs:
secrets: inherit
photonos-3-package-download-tests:
name: Test Photon OS 3 package Downloads
name: Photon OS 3 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -684,7 +684,7 @@ jobs:
secrets: inherit
photonos-4-package-download-tests:
name: Test Photon OS 4 package Downloads
name: Photon OS 4 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -707,7 +707,7 @@ jobs:
secrets: inherit
photonos-4-arm64-package-download-tests:
name: Test Photon OS 4 Arm64 package Downloads
name: Photon OS 4 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -730,7 +730,7 @@ jobs:
secrets: inherit
ubuntu-2004-package-download-tests:
name: Test Ubuntu 20.04 package Downloads
name: Ubuntu 20.04 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -753,7 +753,7 @@ jobs:
secrets: inherit
ubuntu-2004-arm64-package-download-tests:
name: Test Ubuntu 20.04 Arm64 package Downloads
name: Ubuntu 20.04 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -776,7 +776,7 @@ jobs:
secrets: inherit
ubuntu-2204-package-download-tests:
name: Test Ubuntu 22.04 package Downloads
name: Ubuntu 22.04 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -799,7 +799,7 @@ jobs:
secrets: inherit
ubuntu-2204-arm64-package-download-tests:
name: Test Ubuntu 22.04 Arm64 package Downloads
name: Ubuntu 22.04 Arm64 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -822,7 +822,7 @@ jobs:
secrets: inherit
ubuntu-2204-onedir-download-tests:
name: Test Ubuntu 22.04 onedir Downloads
name: Ubuntu 22.04 Onedir Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -845,7 +845,7 @@ jobs:
secrets: inherit
ubuntu-2204-arm64-onedir-download-tests:
name: Test Ubuntu 22.04 Arm64 onedir Downloads
name: Ubuntu 22.04 Arm64 Onedir Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -867,8 +867,8 @@ jobs:
pkg-type: onedir
secrets: inherit
macos-12-Package-download-tests:
name: Test macOS 12 Package Downloads
macos-12-package-download-tests:
name: macOS 12 Package Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -890,8 +890,8 @@ jobs:
pkg-type: package
secrets: inherit
macos-12-Onedir-download-tests:
name: Test macOS 12 Onedir Downloads
macos-12-onedir-download-tests:
name: macOS 12 Onedir Download
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
@ -913,9 +913,9 @@ jobs:
pkg-type: onedir
secrets: inherit
windows-2022-NSIS-amd64-download-tests:
windows-2022-nsis-amd64-download-tests:
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
name: Test Windows 2022 amd64 NSIS Package Downloads
name: Windows 2022 NSIS Package Download
needs:
- prepare-workflow
- publish-repositories
@ -936,9 +936,9 @@ jobs:
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
windows-2022-MSI-amd64-download-tests:
windows-2022-msi-amd64-download-tests:
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
name: Test Windows 2022 amd64 MSI Package Downloads
name: Windows 2022 MSI Package Download
needs:
- prepare-workflow
- publish-repositories
@ -959,9 +959,9 @@ jobs:
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
windows-2022-Onedir-amd64-download-tests:
windows-2022-onedir-amd64-download-tests:
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
name: Test Windows 2022 amd64 Onedir Package Downloads
name: Windows 2022 Onedir Download
needs:
- prepare-workflow
- publish-repositories
@ -1021,11 +1021,11 @@ jobs:
- ubuntu-2204-arm64-package-download-tests
- ubuntu-2204-onedir-download-tests
- ubuntu-2204-arm64-onedir-download-tests
- macos-12-Package-download-tests
- macos-12-Onedir-download-tests
- windows-2022-NSIS-amd64-download-tests
- windows-2022-MSI-amd64-download-tests
- windows-2022-Onedir-amd64-download-tests
- macos-12-package-download-tests
- macos-12-onedir-download-tests
- windows-2022-nsis-amd64-download-tests
- windows-2022-msi-amd64-download-tests
- windows-2022-onedir-amd64-download-tests
environment: release
steps:
- name: Clone The Salt Repository

View file

@ -278,6 +278,8 @@ jobs:
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1225,7 +1227,7 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
amazonlinux-2-pkg-tests:
name: Amazon Linux 2
name: Amazon Linux 2 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1247,7 +1249,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7
name: CentOS 7 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1269,7 +1271,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-8-pkg-tests:
name: CentOS Stream 8
name: CentOS Stream 8 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1291,7 +1293,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-9-pkg-tests:
name: CentOS Stream 9
name: CentOS Stream 9 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1313,7 +1315,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-10-pkg-tests:
name: Debian 10
name: Debian 10 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1335,7 +1337,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11
name: Debian 11 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1357,7 +1359,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-arm64-pkg-tests:
name: Debian 11 Arm64
name: Debian 11 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1379,7 +1381,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-pkg-tests:
name: Photon OS 3
name: Photon OS 3 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1401,7 +1403,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4
name: Photon OS 4 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1423,7 +1425,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-arm64-pkg-tests:
name: Photon OS 4 Arm64
name: Photon OS 4 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1445,7 +1447,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04
name: Ubuntu 20.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1467,7 +1469,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-arm64-pkg-tests:
name: Ubuntu 20.04 Arm64
name: Ubuntu 20.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1489,7 +1491,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-pkg-tests:
name: Ubuntu 22.04
name: Ubuntu 22.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1511,7 +1513,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-arm64-pkg-tests:
name: Ubuntu 22.04 Arm64
name: Ubuntu 22.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1533,7 +1535,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12
name: macOS 12 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
@ -1555,7 +1557,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-nsis-pkg-tests:
name: Windows 2016
name: Windows 2016 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1577,7 +1579,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-msi-pkg-tests:
name: Windows 2016
name: Windows 2016 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1599,7 +1601,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-nsis-pkg-tests:
name: Windows 2019
name: Windows 2019 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1621,7 +1623,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-msi-pkg-tests:
name: Windows 2019
name: Windows 2019 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1643,7 +1645,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-nsis-pkg-tests:
name: Windows 2022
name: Windows 2022 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1665,7 +1667,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-msi-pkg-tests:
name: Windows 2022
name: Windows 2022 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -2241,6 +2243,8 @@ jobs:
run: |
python3 -m pip install 'nox==2022.8.7'
- name: Get coverage reports
id: get-coverage-reports
uses: actions/download-artifact@v3
@ -2255,6 +2259,14 @@ jobs:
run: |
nox --force-color -e combine-coverage
- name: Report Salt Code Coverage
run: |
nox --force-color -e coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
@ -2267,11 +2279,27 @@ jobs:
retention-days: 7
if-no-files-found: error
- name: Create Full Code Coverage HTML Report
- name: Report Combined Code Coverage
run: |
nox --force-color -e coverage-report
- name: Create Combined Code Coverage JSON Report
run: |
nox --force-color -e create-json-coverage-reports
- name: Upload Combined Code Coverage JSON Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-json-report
path: artifacts/coverage/coverage.json
retention-days: 7
if-no-files-found: error
- name: Create Combined Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report
- name: Upload Full Code Coverage HTML Report
- name: Upload Combined Code Coverage HTML Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-html-report

View file

@ -283,6 +283,8 @@ jobs:
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1247,7 +1249,7 @@ jobs:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
amazonlinux-2-pkg-tests:
name: Amazon Linux 2
name: Amazon Linux 2 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1269,7 +1271,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7
name: CentOS 7 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1291,7 +1293,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-8-pkg-tests:
name: CentOS Stream 8
name: CentOS Stream 8 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1313,7 +1315,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centosstream-9-pkg-tests:
name: CentOS Stream 9
name: CentOS Stream 9 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1335,7 +1337,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-10-pkg-tests:
name: Debian 10
name: Debian 10 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1357,7 +1359,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11
name: Debian 11 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1379,7 +1381,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-arm64-pkg-tests:
name: Debian 11 Arm64
name: Debian 11 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1401,7 +1403,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-3-pkg-tests:
name: Photon OS 3
name: Photon OS 3 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1423,7 +1425,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-pkg-tests:
name: Photon OS 4
name: Photon OS 4 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1445,7 +1447,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
photonos-4-arm64-pkg-tests:
name: Photon OS 4 Arm64
name: Photon OS 4 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1467,7 +1469,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-pkg-tests:
name: Ubuntu 20.04
name: Ubuntu 20.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1489,7 +1491,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2004-arm64-pkg-tests:
name: Ubuntu 20.04 Arm64
name: Ubuntu 20.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1511,7 +1513,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-pkg-tests:
name: Ubuntu 22.04
name: Ubuntu 22.04 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1533,7 +1535,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2204-arm64-pkg-tests:
name: Ubuntu 22.04 Arm64
name: Ubuntu 22.04 Arm64 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1555,7 +1557,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12
name: macOS 12 Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
@ -1577,7 +1579,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-nsis-pkg-tests:
name: Windows 2016
name: Windows 2016 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1599,7 +1601,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2016-msi-pkg-tests:
name: Windows 2016
name: Windows 2016 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1621,7 +1623,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-nsis-pkg-tests:
name: Windows 2019
name: Windows 2019 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1643,7 +1645,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2019-msi-pkg-tests:
name: Windows 2019
name: Windows 2019 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1665,7 +1667,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-nsis-pkg-tests:
name: Windows 2022
name: Windows 2022 NSIS Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -1687,7 +1689,7 @@ jobs:
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
windows-2022-msi-pkg-tests:
name: Windows 2022
name: Windows 2022 MSI Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -2189,7 +2191,7 @@ jobs:
skip-junit-reports: true
build-src-repo:
name: Build Source Repository
name: Build Repository
environment: staging
runs-on:
- self-hosted
@ -2197,6 +2199,13 @@ jobs:
- repo-staging
needs:
- prepare-workflow
- build-source-tarball
strategy:
fail-fast: false
matrix:
pkg-type:
- src
steps:
- uses: actions/checkout@v4
@ -2280,7 +2289,7 @@ jobs:
archive-name: src-repo
build-deb-repo:
name: Build DEB Repository
name: Build Repository
environment: staging
runs-on:
- self-hosted
@ -2293,28 +2302,36 @@ jobs:
fail-fast: false
matrix:
include:
- distro: debian
- pkg-type: deb
distro: debian
version: "10"
arch: x86_64
- distro: debian
- pkg-type: deb
distro: debian
version: "10"
arch: aarch64
- distro: debian
- pkg-type: deb
distro: debian
version: "11"
arch: x86_64
- distro: debian
- pkg-type: deb
distro: debian
version: "11"
arch: aarch64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "20.04"
arch: x86_64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "20.04"
arch: aarch64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "22.04"
arch: x86_64
- distro: ubuntu
- pkg-type: deb
distro: ubuntu
version: "22.04"
arch: aarch64
@ -2391,7 +2408,7 @@ jobs:
archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
build-rpm-repo:
name: Build RPM Repository
name: Build Repository
environment: staging
runs-on:
- self-hosted
@ -2404,58 +2421,76 @@ jobs:
fail-fast: false
matrix:
include:
- distro: amazon
- pkg-type: rpm
distro: amazon
version: "2"
arch: x86_64
- distro: amazon
- pkg-type: rpm
distro: amazon
version: "2"
arch: aarch64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "8"
arch: x86_64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "8"
arch: aarch64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "9"
arch: x86_64
- distro: redhat
- pkg-type: rpm
distro: redhat
version: "9"
arch: aarch64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "36"
arch: x86_64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "36"
arch: aarch64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "37"
arch: x86_64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "37"
arch: aarch64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "38"
arch: x86_64
- distro: fedora
- pkg-type: rpm
distro: fedora
version: "38"
arch: aarch64
- distro: photon
- pkg-type: rpm
distro: photon
version: "3"
arch: x86_64
- distro: photon
- pkg-type: rpm
distro: photon
version: "3"
arch: aarch64
- distro: photon
- pkg-type: rpm
distro: photon
version: "4"
arch: x86_64
- distro: photon
- pkg-type: rpm
distro: photon
version: "4"
arch: aarch64
@ -2537,7 +2572,7 @@ jobs:
archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo
build-windows-repo:
name: Build Windows Repository
name: Build Repository
environment: staging
runs-on:
- self-hosted
@ -2546,6 +2581,12 @@ jobs:
needs:
- prepare-workflow
- build-windows-pkgs-onedir
strategy:
fail-fast: false
matrix:
pkg-type:
- windows
steps:
- uses: actions/checkout@v4
@ -2631,7 +2672,7 @@ jobs:
archive-name: windows-repo
build-macos-repo:
name: Build macOS Repository
name: Build Repository
environment: staging
runs-on:
- self-hosted
@ -2640,6 +2681,12 @@ jobs:
needs:
- prepare-workflow
- build-macos-pkgs-onedir
strategy:
fail-fast: false
matrix:
pkg-type:
- macos
steps:
- uses: actions/checkout@v4
@ -2707,7 +2754,7 @@ jobs:
archive-name: macos-repo
build-onedir-repo:
name: Build Onedir Repository
name: Build Repository
environment: staging
runs-on:
- self-hosted
@ -2715,6 +2762,15 @@ jobs:
- repo-staging
needs:
- prepare-workflow
- build-salt-onedir-linux
- build-salt-onedir-macos
- build-salt-onedir-windows
strategy:
fail-fast: false
matrix:
pkg-type:
- onedir
steps:
- uses: actions/checkout@v4
@ -2934,7 +2990,7 @@ jobs:
if-no-files-found: error
almalinux-8-package-download-tests:
name: Test Alma Linux 8 package Downloads
name: Alma Linux 8 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -2956,7 +3012,7 @@ jobs:
secrets: inherit
almalinux-8-arm64-package-download-tests:
name: Test Alma Linux 8 Arm64 package Downloads
name: Alma Linux 8 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -2978,7 +3034,7 @@ jobs:
secrets: inherit
almalinux-9-package-download-tests:
name: Test Alma Linux 9 package Downloads
name: Alma Linux 9 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3000,7 +3056,7 @@ jobs:
secrets: inherit
almalinux-9-arm64-package-download-tests:
name: Test Alma Linux 9 Arm64 package Downloads
name: Alma Linux 9 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3022,7 +3078,7 @@ jobs:
secrets: inherit
amazonlinux-2-package-download-tests:
name: Test Amazon Linux 2 package Downloads
name: Amazon Linux 2 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3044,7 +3100,7 @@ jobs:
secrets: inherit
amazonlinux-2-arm64-package-download-tests:
name: Test Amazon Linux 2 Arm64 package Downloads
name: Amazon Linux 2 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3066,7 +3122,7 @@ jobs:
secrets: inherit
centos-7-package-download-tests:
name: Test CentOS 7 package Downloads
name: CentOS 7 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3088,7 +3144,7 @@ jobs:
secrets: inherit
centos-7-arm64-package-download-tests:
name: Test CentOS 7 Arm64 package Downloads
name: CentOS 7 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3110,7 +3166,7 @@ jobs:
secrets: inherit
centosstream-8-package-download-tests:
name: Test CentOS Stream 8 package Downloads
name: CentOS Stream 8 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3132,7 +3188,7 @@ jobs:
secrets: inherit
centosstream-8-arm64-package-download-tests:
name: Test CentOS Stream 8 Arm64 package Downloads
name: CentOS Stream 8 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3154,7 +3210,7 @@ jobs:
secrets: inherit
centosstream-9-package-download-tests:
name: Test CentOS Stream 9 package Downloads
name: CentOS Stream 9 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3176,7 +3232,7 @@ jobs:
secrets: inherit
centosstream-9-arm64-package-download-tests:
name: Test CentOS Stream 9 Arm64 package Downloads
name: CentOS Stream 9 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3198,7 +3254,7 @@ jobs:
secrets: inherit
debian-10-package-download-tests:
name: Test Debian 10 package Downloads
name: Debian 10 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3220,7 +3276,7 @@ jobs:
secrets: inherit
debian-11-package-download-tests:
name: Test Debian 11 package Downloads
name: Debian 11 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3242,7 +3298,7 @@ jobs:
secrets: inherit
debian-11-arm64-package-download-tests:
name: Test Debian 11 Arm64 package Downloads
name: Debian 11 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3264,7 +3320,7 @@ jobs:
secrets: inherit
fedora-37-package-download-tests:
name: Test Fedora 37 package Downloads
name: Fedora 37 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3286,7 +3342,7 @@ jobs:
secrets: inherit
fedora-37-arm64-package-download-tests:
name: Test Fedora 37 Arm64 package Downloads
name: Fedora 37 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3308,7 +3364,7 @@ jobs:
secrets: inherit
fedora-38-package-download-tests:
name: Test Fedora 38 package Downloads
name: Fedora 38 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3330,7 +3386,7 @@ jobs:
secrets: inherit
fedora-38-arm64-package-download-tests:
name: Test Fedora 38 Arm64 package Downloads
name: Fedora 38 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3352,7 +3408,7 @@ jobs:
secrets: inherit
photonos-3-package-download-tests:
name: Test Photon OS 3 package Downloads
name: Photon OS 3 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3374,7 +3430,7 @@ jobs:
secrets: inherit
photonos-4-package-download-tests:
name: Test Photon OS 4 package Downloads
name: Photon OS 4 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3396,7 +3452,7 @@ jobs:
secrets: inherit
photonos-4-arm64-package-download-tests:
name: Test Photon OS 4 Arm64 package Downloads
name: Photon OS 4 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3418,7 +3474,7 @@ jobs:
secrets: inherit
ubuntu-2004-package-download-tests:
name: Test Ubuntu 20.04 package Downloads
name: Ubuntu 20.04 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3440,7 +3496,7 @@ jobs:
secrets: inherit
ubuntu-2004-arm64-package-download-tests:
name: Test Ubuntu 20.04 Arm64 package Downloads
name: Ubuntu 20.04 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3462,7 +3518,7 @@ jobs:
secrets: inherit
ubuntu-2204-package-download-tests:
name: Test Ubuntu 22.04 package Downloads
name: Ubuntu 22.04 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3484,7 +3540,7 @@ jobs:
secrets: inherit
ubuntu-2204-arm64-package-download-tests:
name: Test Ubuntu 22.04 Arm64 package Downloads
name: Ubuntu 22.04 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3506,7 +3562,7 @@ jobs:
secrets: inherit
ubuntu-2204-onedir-download-tests:
name: Test Ubuntu 22.04 onedir Downloads
name: Ubuntu 22.04 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3528,7 +3584,7 @@ jobs:
secrets: inherit
ubuntu-2204-arm64-onedir-download-tests:
name: Test Ubuntu 22.04 Arm64 onedir Downloads
name: Ubuntu 22.04 Arm64 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3549,8 +3605,8 @@ jobs:
pkg-type: onedir
secrets: inherit
macos-12-Package-download-tests:
name: Test macOS 12 Package Downloads
macos-12-package-download-tests:
name: macOS 12 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3571,8 +3627,8 @@ jobs:
pkg-type: package
secrets: inherit
macos-12-Onedir-download-tests:
name: Test macOS 12 Onedir Downloads
macos-12-onedir-download-tests:
name: macOS 12 Package Download
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -3593,9 +3649,9 @@ jobs:
pkg-type: onedir
secrets: inherit
windows-2022-NSIS-amd64-download-tests:
windows-2022-nsis-amd64-download-tests:
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
name: Test Windows 2022 amd64 NSIS Package Downloads
name: Windows 2022 Package Download
needs:
- prepare-workflow
- publish-repositories
@ -3615,9 +3671,9 @@ jobs:
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
windows-2022-MSI-amd64-download-tests:
windows-2022-msi-amd64-download-tests:
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
name: Test Windows 2022 amd64 MSI Package Downloads
name: Windows 2022 Package Download
needs:
- prepare-workflow
- publish-repositories
@ -3637,9 +3693,9 @@ jobs:
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
windows-2022-Onedir-amd64-download-tests:
windows-2022-onedir-amd64-download-tests:
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
name: Test Windows 2022 amd64 Onedir Package Downloads
name: Windows 2022 Package Download
needs:
- prepare-workflow
- publish-repositories
@ -3770,11 +3826,11 @@ jobs:
- ubuntu-2204-arm64-package-download-tests
- ubuntu-2204-onedir-download-tests
- ubuntu-2204-arm64-onedir-download-tests
- macos-12-Package-download-tests
- macos-12-Onedir-download-tests
- windows-2022-NSIS-amd64-download-tests
- windows-2022-MSI-amd64-download-tests
- windows-2022-Onedir-amd64-download-tests
- macos-12-package-download-tests
- macos-12-onedir-download-tests
- windows-2022-nsis-amd64-download-tests
- windows-2022-msi-amd64-download-tests
- windows-2022-onedir-amd64-download-tests
environment: staging
runs-on:
- self-hosted

View file

@ -13,7 +13,8 @@
("ubuntu", "22.04", "x86_64"),
("ubuntu", "22.04", "aarch64"),
) %>
- distro: <{ distro }>
- pkg-type: deb
distro: <{ distro }>
version: "<{ version }>"
arch: <{ arch }>
<%- endfor %>

View file

@ -1,4 +1,10 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- macos
steps:
- uses: actions/checkout@v4

View file

@ -1,4 +1,10 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- onedir
steps:
- uses: actions/checkout@v4

View file

@ -1,17 +1,17 @@
<%- for platform, type, display_name, needs_pkg in (
(None, "src", "Source", False),
("linux", "deb", "DEB", True),
("linux", "rpm", "RPM", True),
("windows", "windows", "Windows", True),
("macos", "macos", "macOS", True),
(None, "onedir", "Onedir", False),
) %>
<%- for type, display_name in (
("src", "Source"),
("deb", "DEB"),
("rpm", "RPM"),
("windows", "Windows"),
("macos", "macOS"),
("onedir", "Onedir"),
) %>
<%- set job_name = "build-{}-repo".format(type) %>
<%- do build_repo_needs.append(job_name) %>
<{ job_name }>:
name: Build <{ display_name }> Repository
name: Build Repository
environment: <{ gh_environment }>
runs-on:
- self-hosted
@ -19,11 +19,16 @@
- repo-<{ gh_environment }>
needs:
- prepare-workflow
<%- if needs_pkg %>
<%- if type not in ("src", "onedir") %>
- build-<{ type }>-pkgs-onedir
<%- elif platform %>
- build-salt-onedir-<{ platform }>
<%- elif type == 'onedir' %>
- build-salt-onedir-linux
- build-salt-onedir-macos
- build-salt-onedir-windows
<%- elif type == 'src' %>
- build-source-tarball
<%- endif %>
<%- include "build-{}-repo.yml.jinja".format(type) %>
<%- endfor %>

View file

@ -23,7 +23,8 @@
("photon", "4", "x86_64"),
("photon", "4", "aarch64"),
) %>
- distro: <{ distro }>
- pkg-type: rpm
distro: <{ distro }>
version: "<{ version }>"
arch: <{ arch }>
<%- endfor %>

View file

@ -1,4 +1,10 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- src
steps:
- uses: actions/checkout@v4

View file

@ -1,4 +1,10 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- windows
steps:
- uses: actions/checkout@v4

View file

@ -352,6 +352,19 @@
run: |
python3 -m pip install 'nox==<{ nox_version }>'
{# We can't yet use tokenless uploads with the codecov CLI
- name: Install Codecov CLI
run: |
python3 -m pip install codecov-cli
- name: Tell Codecov To Process Reports
run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
send-notifications --git-service github --sha ${{ github.sha }}
#}
- name: Get coverage reports
id: get-coverage-reports
uses: actions/download-artifact@v3
@ -366,6 +379,14 @@
run: |
nox --force-color -e combine-coverage
- name: Report Salt Code Coverage
run: |
nox --force-color -e coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
@ -378,11 +399,27 @@
retention-days: 7
if-no-files-found: error
- name: Create Full Code Coverage HTML Report
- name: Report Combined Code Coverage
run: |
nox --force-color -e coverage-report
- name: Create Combined Code Coverage JSON Report
run: |
nox --force-color -e create-json-coverage-reports
- name: Upload Combined Code Coverage JSON Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-json-report
path: artifacts/coverage/coverage.json
retention-days: 7
if-no-files-found: error
- name: Create Combined Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report
- name: Upload Full Code Coverage HTML Report
- name: Upload Combined Code Coverage HTML Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-html-report

View file

@ -309,6 +309,27 @@ jobs:
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
{# We can't yet use tokenless uploads with the codecov CLI
- name: Install Codecov CLI
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
run: |
python3 -m pip install codecov-cli
- name: Save Commit Metadata In Codecov
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
create-commit --git-service github --sha ${{ github.sha }}
- name: Create Codecov Coverage Report
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
create-report --git-service github --sha ${{ github.sha }}
#}
<%- endblock prepare_workflow_job %>
<%- endif %>

View file

@ -259,7 +259,7 @@ permissions:
tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-pkg-repo-downloads.yml.jinja" %>
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
release:

View file

@ -158,7 +158,7 @@ concurrency:
if-no-files-found: error
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-pkg-repo-downloads.yml.jinja" %>
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
publish-pypi:

View file

@ -1,42 +1,41 @@
<%- set linux_pkg_tests = (
("almalinux-8", "Alma Linux 8", "x86_64", "package"),
("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64", "package"),
("almalinux-9", "Alma Linux 9", "x86_64", "package"),
("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64", "package"),
("amazonlinux-2", "Amazon Linux 2", "x86_64", "package"),
("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "package"),
("centos-7", "CentOS 7", "x86_64", "package"),
("centos-7-arm64", "CentOS 7 Arm64", "aarch64", "package"),
("centosstream-8", "CentOS Stream 8", "x86_64", "package"),
("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64", "package"),
("centosstream-9", "CentOS Stream 9", "x86_64", "package"),
("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64", "package"),
("debian-10", "Debian 10", "x86_64", "package"),
("debian-11", "Debian 11", "x86_64", "package"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64", "package"),
("fedora-37", "Fedora 37", "x86_64", "package"),
("fedora-37-arm64", "Fedora 37 Arm64", "aarch64", "package"),
("fedora-38", "Fedora 38", "x86_64", "package"),
("fedora-38-arm64", "Fedora 38 Arm64", "aarch64", "package"),
("photonos-3", "Photon OS 3", "x86_64", "package"),
("photonos-4", "Photon OS 4", "x86_64", "package"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "package"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "package"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "package"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "package"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "package"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "onedir"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "onedir")
("almalinux-8", "Alma Linux 8", "x86_64", "Package"),
("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64", "Package"),
("almalinux-9", "Alma Linux 9", "x86_64", "Package"),
("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64", "Package"),
("amazonlinux-2", "Amazon Linux 2", "x86_64", "Package"),
("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "Package"),
("centos-7", "CentOS 7", "x86_64", "Package"),
("centos-7-arm64", "CentOS 7 Arm64", "aarch64", "Package"),
("centosstream-8", "CentOS Stream 8", "x86_64", "Package"),
("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64", "Package"),
("centosstream-9", "CentOS Stream 9", "x86_64", "Package"),
("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64", "Package"),
("debian-10", "Debian 10", "x86_64", "Package"),
("debian-11", "Debian 11", "x86_64", "Package"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64", "Package"),
("fedora-37", "Fedora 37", "x86_64", "Package"),
("fedora-37-arm64", "Fedora 37 Arm64", "aarch64", "Package"),
("fedora-38", "Fedora 38", "x86_64", "Package"),
("fedora-38-arm64", "Fedora 38 Arm64", "aarch64", "Package"),
("photonos-3", "Photon OS 3", "x86_64", "Package"),
("photonos-4", "Photon OS 4", "x86_64", "Package"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "Package"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "Package"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "Package"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "Package"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "Package"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "Onedir"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "Onedir")
) %>
<%- for slug, display_name, arch, pkg_type in linux_pkg_tests %>
<%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type) %>
<%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type.lower()) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
name: Test <{ display_name }> <{ pkg_type }> Downloads
name: <{ display_name }> Package Download
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
@ -71,11 +70,11 @@
("macos-12", "macOS 12", "x86_64", "Package"),
("macos-12", "macOS 12", "x86_64", "Onedir"),
) %>
<%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type) %>
<%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type.lower()) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
name: Test <{ display_name }> <{ pkg_type }> Downloads
name: <{ display_name }> Package Download
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
@ -109,7 +108,7 @@
("windows-2022", "Windows 2022", "amd64"),
) %>
<%- for pkg_type in ("NSIS", "MSI", "Onedir") %>
<%- set job_name = "{}-{}-{}-download-tests".format(slug.replace(".", ""), pkg_type, arch.lower()) %>
<%- set job_name = "{}-{}-{}-download-tests".format(slug.replace(".", ""), pkg_type.lower(), arch.lower()) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
@ -118,7 +117,7 @@
<%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %>
name: Test <{ display_name }> <{ arch }> <{ pkg_type }> Package Downloads
name: <{ display_name }> Package Download
needs:
- prepare-workflow
- publish-repositories

View file

@ -1,4 +1,3 @@
<%- set linux_pkg_tests = (
("almalinux-8", "Alma Linux 8", "x86_64", "rpm"),
("almalinux-9", "Alma Linux 9", "x86_64", "rpm"),
@ -39,7 +38,7 @@
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }>
name: <{ display_name }> Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@ -69,7 +68,7 @@
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }>
name: <{ display_name }> Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
@ -101,7 +100,7 @@
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }>
name: <{ display_name }> <{ pkg_type }> Package
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow

View file

@ -90,7 +90,9 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 360 # 6 Hours
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 5 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 120 || 300 }}
needs:
- generate-matrix
strategy:
@ -101,6 +103,12 @@ jobs:
SALT_TRANSPORT: ${{ matrix.transport }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
@ -310,13 +318,13 @@ jobs:
sudo chown -R "$(id -un)" .
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false
run: |
nox -e combine-coverage
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
@ -334,16 +342,8 @@ jobs:
path: |
artifacts/coverage/
- name: Upload All Code Coverage Test Run Artifacts
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: all-testrun-coverage-artifacts
path: |
artifacts/coverage/
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
@ -351,7 +351,7 @@ jobs:
artifacts/xml-unittests-output/
- name: Upload Test Run Log Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
@ -400,18 +400,88 @@ jobs:
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Combine Code Coverage
if: ${{ inputs.skip-code-coverage == false }}
continue-on-error: true
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
nox -e create-xml-coverage-reports
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: code-coverage
path: artifacts/coverage
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/macos/codecov
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }} \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }} \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Report Salt Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
@ -419,14 +489,21 @@ jobs:
run: |
nox --force-color -e report-coverage -- salt
- name: Report Tests Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage -- tests
- name: Report Combined Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage
- name: Rename Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: all-testrun-coverage-artifacts
path: artifacts/coverage

View file

@ -98,7 +98,9 @@ jobs:
- self-hosted
- linux
- bastion
timeout-minutes: 300 # 5 Hours - More than this and something is wrong
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 5 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 120 || 300 }}
needs:
- generate-matrix
strategy:
@ -110,6 +112,12 @@ jobs:
TEST_GROUP: ${{ matrix.test-group || 1 }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
@ -253,13 +261,13 @@ jobs:
--test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
@ -283,16 +291,8 @@ jobs:
path: |
artifacts/coverage/
- name: Upload All Code Coverage Test Run Artifacts
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: all-testrun-coverage-artifacts
path: |
artifacts/coverage/
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
@ -300,7 +300,7 @@ jobs:
artifacts/xml-unittests-output/
- name: Upload Test Run Log Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
@ -310,7 +310,7 @@ jobs:
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && inputs.skip-junit-reports == false
if: always() && inputs.skip-junit-reports == false && job.status != 'cancelled'
with:
check_name: Test Results(${{ inputs.distro-slug }}, transport=${{ matrix.transport }}, tests-chunk=${{ matrix.tests-chunk }}, group=${{ matrix.test-group || '1' }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
@ -347,18 +347,88 @@ jobs:
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Combine Code Coverage
if: ${{ inputs.skip-code-coverage == false }}
continue-on-error: true
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
nox -e create-xml-coverage-reports
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: code-coverage
path: artifacts/coverage
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }} \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }} \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Report Salt Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
@ -366,14 +436,21 @@ jobs:
run: |
nox --force-color -e report-coverage -- salt
- name: Report Tests Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage -- tests
- name: Report Combined Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage
- name: Rename Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: all-testrun-coverage-artifacts
path: artifacts/coverage

View file

@ -70,13 +70,20 @@ env:
jobs:
test:
name: Test Pkg Download
name: Test
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
arch:
- ${{ inputs.arch }}
pkg-type:
- ${{ inputs.pkg-type }}
steps:
@ -174,10 +181,15 @@ jobs:
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm create-xml-coverage-reports ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
@ -197,6 +209,84 @@ jobs:
run: |
sudo chown -R "$(id -un)" .
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} --flag pkg \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }},pkg \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} --flag pkg \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }},pkg \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3

View file

@ -68,10 +68,17 @@ env:
jobs:
test:
name: Test Pkg Download
name: Test
runs-on: ${{ inputs.distro-slug }}
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
arch:
- ${{ inputs.arch }}
pkg-type:
- ${{ inputs.pkg-type }}
steps:
@ -166,22 +173,100 @@ jobs:
run: |
sudo chown -R "$(id -un)" .
- name: Combine Coverage Reports
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
nox --force-color -e create-xml-coverage-reports
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/macos/codecov
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} --flag pkg \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }},pkg \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} --flag pkg \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }},pkg \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && job.status != 'cancelled'
if: always()
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}

View file

@ -75,15 +75,28 @@ env:
jobs:
test:
name: Test Pkg Download
name: Test
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
arch:
- ${{ inputs.arch }}
pkg-type:
- ${{ inputs.pkg-type }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
@ -173,10 +186,15 @@ jobs:
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm create-xml-coverage-reports ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
@ -196,6 +214,84 @@ jobs:
run: |
sudo chown -R "$(id -un)" .
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} --flag pkg \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }},pkg \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} --flag pkg \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }},pkg \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3

View file

@ -69,7 +69,7 @@ env:
jobs:
generate-matrix:
name: Test Pkg Matrix
name: Generate Matrix
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
outputs:
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
@ -93,7 +93,7 @@ jobs:
test:
name: Test Pkg
name: Test
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
@ -104,6 +104,12 @@ jobs:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
@ -189,7 +195,7 @@ jobs:
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
@ -197,7 +203,7 @@ jobs:
tree -a artifacts
- name: Upload Test Run Artifacts
if: always() && job.status != 'cancelled'
if: always()
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }}
@ -207,7 +213,7 @@ jobs:
!artifacts/salt-*.tar.*
report:
name: Test Pkg Reports
name: Report
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:

View file

@ -71,7 +71,7 @@ env:
jobs:
generate-matrix:
name: Test ${{ inputs.pkg-type }} Pkg Matrix
name: Generate Matrix
runs-on:
- self-hosted
- linux
@ -98,7 +98,7 @@ jobs:
test:
name: Test Pkg
name: Test
runs-on:
- self-hosted
- linux
@ -112,6 +112,12 @@ jobs:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
@ -218,7 +224,7 @@ jobs:
!artifacts/salt-*.tar.*
report:
name: Test Pkg Reports
name: Report
runs-on:
- self-hosted
- linux

View file

@ -1,8 +1,8 @@
{
"almalinux-8-arm64": {
"ami": "ami-0f08fc00f1689a8ec",
"ami": "ami-04c86a9990a3836b9",
"ami_description": "CI Image of AlmaLinux 8 arm64",
"ami_name": "salt-project/ci/almalinux/8/arm64/20231003.2057",
"ami_name": "salt-project/ci/almalinux/8/arm64/20231005.1556",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -10,9 +10,9 @@
"ssh_username": "ec2-user"
},
"almalinux-8": {
"ami": "ami-08f648e0e6fa619c2",
"ami": "ami-059ed5c00c02c564b",
"ami_description": "CI Image of AlmaLinux 8 x86_64",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20231003.2058",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20231005.1557",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -20,9 +20,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9-arm64": {
"ami": "ami-0394b210e1e09b962",
"ami": "ami-0213f3e31656f7393",
"ami_description": "CI Image of AlmaLinux 9 arm64",
"ami_name": "salt-project/ci/almalinux/9/arm64/20231003.2058",
"ami_name": "salt-project/ci/almalinux/9/arm64/20231005.1557",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -30,9 +30,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9": {
"ami": "ami-0a909a150cfebea5b",
"ami": "ami-0c4e36d63e728ee21",
"ami_description": "CI Image of AlmaLinux 9 x86_64",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20231003.2100",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20231005.1557",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -40,9 +40,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2-arm64": {
"ami": "ami-038eac6a08feecdb2",
"ami": "ami-010d24ab23bfb0330",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20231003.2104",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20231005.1614",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -50,9 +50,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-09682e96e7785642d",
"ami": "ami-0ad016fe17f923c6b",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20231003.2104",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20231005.1614",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -60,9 +60,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023.1-arm64": {
"ami": "ami-0e46c84fb43817334",
"ami": "ami-037b7d6177ec8259d",
"ami_description": "CI Image of AmazonLinux 2023.1 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023.1/arm64/20231003.2103",
"ami_name": "salt-project/ci/amazonlinux/2023.1/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -70,9 +70,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023.1": {
"ami": "ami-0ac591368ec230345",
"ami": "ami-08e04f6dd44c858fa",
"ami_description": "CI Image of AmazonLinux 2023.1 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023.1/x86_64/20231003.2103",
"ami_name": "salt-project/ci/amazonlinux/2023.1/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -80,9 +80,9 @@
"ssh_username": "ec2-user"
},
"archlinux-lts": {
"ami": "ami-017de6f1e636021a0",
"ami": "ami-0b88ddfb321aff9ba",
"ami_description": "CI Image of ArchLinux lts x86_64",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20231003.2108",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "false",
"instance_type": "t3a.large",
@ -90,9 +90,9 @@
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-088cb5f3066efa748",
"ami": "ami-01d5ee66081a02154",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20231003.2108",
"ami_name": "salt-project/ci/centos/7/arm64/20231005.1617",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -100,9 +100,9 @@
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-05c4056c36cecc136",
"ami": "ami-020fcff1da1f72f27",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20231003.2107",
"ami_name": "salt-project/ci/centos/7/x86_64/20231005.1616",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -110,9 +110,9 @@
"ssh_username": "centos"
},
"centosstream-8-arm64": {
"ami": "ami-0e2a761782490f7c2",
"ami": "ami-0ac6238b6506f7b8f",
"ami_description": "CI Image of CentOSStream 8 arm64",
"ami_name": "salt-project/ci/centosstream/8/arm64/20231003.2109",
"ami_name": "salt-project/ci/centosstream/8/arm64/20231005.1614",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -120,9 +120,9 @@
"ssh_username": "centos"
},
"centosstream-8": {
"ami": "ami-06178cd094ea71c34",
"ami": "ami-0bfceb03d43d0ba0e",
"ami_description": "CI Image of CentOSStream 8 x86_64",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20231003.2108",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -130,9 +130,9 @@
"ssh_username": "centos"
},
"centosstream-9-arm64": {
"ami": "ami-0ea1025028e6fe700",
"ami": "ami-04db23ba9082a01bf",
"ami_description": "CI Image of CentOSStream 9 arm64",
"ami_name": "salt-project/ci/centosstream/9/arm64/20231003.2109",
"ami_name": "salt-project/ci/centosstream/9/arm64/20231005.1615",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -140,9 +140,9 @@
"ssh_username": "ec2-user"
},
"centosstream-9": {
"ami": "ami-0f474b360fca72512",
"ami": "ami-0a47f4f785cb7a81c",
"ami_description": "CI Image of CentOSStream 9 x86_64",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20231003.2109",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -150,9 +150,9 @@
"ssh_username": "ec2-user"
},
"debian-10-arm64": {
"ami": "ami-0b9cbee875ae2e145",
"ami": "ami-015d30e48fa213528",
"ami_description": "CI Image of Debian 10 arm64",
"ami_name": "salt-project/ci/debian/10/arm64/20231003.2114",
"ami_name": "salt-project/ci/debian/10/arm64/20231005.1601",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -160,9 +160,9 @@
"ssh_username": "admin"
},
"debian-10": {
"ami": "ami-03b713e88ac915c18",
"ami": "ami-0397043698fedfa4c",
"ami_description": "CI Image of Debian 10 x86_64",
"ami_name": "salt-project/ci/debian/10/x86_64/20231003.2112",
"ami_name": "salt-project/ci/debian/10/x86_64/20231005.1606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -170,9 +170,9 @@
"ssh_username": "admin"
},
"debian-11-arm64": {
"ami": "ami-0e48f24d9def8d84c",
"ami": "ami-008dbab5525972174",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20231003.2114",
"ami_name": "salt-project/ci/debian/11/arm64/20231005.1607",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -180,9 +180,9 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-07a2fb75d29d0d6f7",
"ami": "ami-04fc56501daaf3c94",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20231003.2116",
"ami_name": "salt-project/ci/debian/11/x86_64/20231005.1607",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -190,9 +190,9 @@
"ssh_username": "admin"
},
"debian-12-arm64": {
"ami": "ami-027199ded9ce9f659",
"ami": "ami-0956b73228a7368c3",
"ami_description": "CI Image of Debian 12 arm64",
"ami_name": "salt-project/ci/debian/12/arm64/20231003.2117",
"ami_name": "salt-project/ci/debian/12/arm64/20231005.1610",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -200,9 +200,9 @@
"ssh_username": "admin"
},
"debian-12": {
"ami": "ami-02156ad853a403599",
"ami": "ami-0d0aa04bb5c49e54f",
"ami_description": "CI Image of Debian 12 x86_64",
"ami_name": "salt-project/ci/debian/12/x86_64/20231003.2119",
"ami_name": "salt-project/ci/debian/12/x86_64/20231005.1613",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -210,9 +210,9 @@
"ssh_username": "admin"
},
"fedora-37-arm64": {
"ami": "ami-0dfb1b2e3b6cd8847",
"ami": "ami-0201f64fda9f1ca6d",
"ami_description": "CI Image of Fedora 37 arm64",
"ami_name": "salt-project/ci/fedora/37/arm64/20231003.2119",
"ami_name": "salt-project/ci/fedora/37/arm64/20231005.1617",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -220,9 +220,9 @@
"ssh_username": "fedora"
},
"fedora-37": {
"ami": "ami-0d27e014bf07af18b",
"ami": "ami-02dfc80c8b14fd5bc",
"ami_description": "CI Image of Fedora 37 x86_64",
"ami_name": "salt-project/ci/fedora/37/x86_64/20231003.2120",
"ami_name": "salt-project/ci/fedora/37/x86_64/20231005.1618",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -230,9 +230,9 @@
"ssh_username": "fedora"
},
"fedora-38-arm64": {
"ami": "ami-04f5a34bae3040974",
"ami": "ami-0b03c270c7f50165d",
"ami_description": "CI Image of Fedora 38 arm64",
"ami_name": "salt-project/ci/fedora/38/arm64/20231003.2120",
"ami_name": "salt-project/ci/fedora/38/arm64/20231005.1618",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -240,9 +240,9 @@
"ssh_username": "fedora"
},
"fedora-38": {
"ami": "ami-0e69802061ed79891",
"ami": "ami-0927a80620f670c23",
"ami_description": "CI Image of Fedora 38 x86_64",
"ami_name": "salt-project/ci/fedora/38/x86_64/20231003.2123",
"ami_name": "salt-project/ci/fedora/38/x86_64/20231005.1626",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -250,9 +250,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-0ebb684e16914ad0a",
"ami": "ami-0b51e3479fabb4078",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20231003.2110",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20231005.1614",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -260,9 +260,9 @@
"ssh_username": "ec2-user"
},
"photonos-3-arm64": {
"ami": "ami-054765b3beb6dd97c",
"ami": "ami-0a33037524874686c",
"ami_description": "CI Image of PhotonOS 3 arm64",
"ami_name": "salt-project/ci/photonos/3/arm64/20231003.2129",
"ami_name": "salt-project/ci/photonos/3/arm64/20231005.1558",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -270,9 +270,9 @@
"ssh_username": "root"
},
"photonos-3": {
"ami": "ami-0224e8a4471113ebb",
"ami": "ami-068c5c07aa91d84d1",
"ami_description": "CI Image of PhotonOS 3 x86_64",
"ami_name": "salt-project/ci/photonos/3/x86_64/20231003.2128",
"ami_name": "salt-project/ci/photonos/3/x86_64/20231005.1558",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -280,9 +280,9 @@
"ssh_username": "root"
},
"photonos-4-arm64": {
"ami": "ami-091f6d77aa3921394",
"ami": "ami-0f8c72854c5b5679c",
"ami_description": "CI Image of PhotonOS 4 arm64",
"ami_name": "salt-project/ci/photonos/4/arm64/20231003.2124",
"ami_name": "salt-project/ci/photonos/4/arm64/20231005.1558",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -290,9 +290,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-0714704e9471a8e0c",
"ami": "ami-04b8974b830b5adb0",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20231003.2130",
"ami_name": "salt-project/ci/photonos/4/x86_64/20231005.1559",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -300,9 +300,9 @@
"ssh_username": "root"
},
"photonos-5-arm64": {
"ami": "ami-05ebc5bddb487c20b",
"ami": "ami-0f466b198cbcaf380",
"ami_description": "CI Image of PhotonOS 5 arm64",
"ami_name": "salt-project/ci/photonos/5/arm64/20231003.2130",
"ami_name": "salt-project/ci/photonos/5/arm64/20231005.1559",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -310,9 +310,9 @@
"ssh_username": "root"
},
"photonos-5": {
"ami": "ami-0b7e17bc1990da3af",
"ami": "ami-01bb09f84464b243e",
"ami_description": "CI Image of PhotonOS 5 x86_64",
"ami_name": "salt-project/ci/photonos/5/x86_64/20231003.2131",
"ami_name": "salt-project/ci/photonos/5/x86_64/20231005.1601",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -320,9 +320,9 @@
"ssh_username": "root"
},
"ubuntu-20.04-arm64": {
"ami": "ami-09210544c9163df86",
"ami": "ami-06d9a9e3b5ae369c7",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20231003.2110",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -330,9 +330,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-05894335447f4c052",
"ami": "ami-080a55fb6cb08134d",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20231003.2110",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -340,9 +340,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-090423dbe605f6d3e",
"ami": "ami-0c87b8f0b8794f32e",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20231003.2111",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -350,9 +350,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-0a465357b34ea7fdc",
"ami": "ami-0ce98043f227c9ac0",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20231003.2111",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -360,9 +360,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04-arm64": {
"ami": "ami-0ed81524d646f95ee",
"ami": "ami-0519c583e36309fef",
"ami_description": "CI Image of Ubuntu 23.04 arm64",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20231003.2111",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -370,9 +370,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04": {
"ami": "ami-02c7edd6357be51b6",
"ami": "ami-063ad5dfb49f09182",
"ami_description": "CI Image of Ubuntu 23.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20231003.2112",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -380,9 +380,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-04f113ff291a8953f",
"ami": "ami-0f1ac34593b8b044f",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20231003.2104",
"ami_name": "salt-project/ci/windows/2016/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -390,9 +390,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-06475f495e0151fc9",
"ami": "ami-09100ff6a103a28ab",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20231003.2106",
"ami_name": "salt-project/ci/windows/2019/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -400,9 +400,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-0558da89560480f32",
"ami": "ami-0266dc6a12bc9fca6",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20231003.2106",
"ami_name": "salt-project/ci/windows/2022/x86_64/20231005.1616",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",

View file

@ -6,7 +6,9 @@ Nox configuration script
"""
# pylint: disable=resource-leakage,3rd-party-module-not-gated
import contextlib
import datetime
import glob
import gzip
import json
import os
@ -349,8 +351,6 @@ def _run_with_coverage(session, *test_cmd, env=None, on_rerun=False):
if env is None:
env = {}
coverage_base_env = {}
sitecustomize_dir = session.run(
"salt-factories", "--coverage", silent=True, log=True, stderr=None
)
@ -382,80 +382,36 @@ def _run_with_coverage(session, *test_cmd, env=None, on_rerun=False):
python_path_entries.insert(0, str(sitecustomize_dir))
python_path_env_var = os.pathsep.join(python_path_entries)
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
coverage_base_env["COVERAGE_FILE"] = COVERAGE_FILE
env.update(
{
# The updated python path so that sitecustomize is importable
"PYTHONPATH": python_path_env_var,
# Instruct sub processes to also run under coverage
"COVERAGE_PROCESS_START": str(REPO_ROOT / ".coveragerc"),
},
**coverage_base_env,
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": COVERAGE_FILE,
}
)
try:
session.run(*test_cmd, env=env)
finally:
if os.environ.get("GITHUB_ACTIONS_PIPELINE", "0") == "0":
# Always combine and generate the XML coverage report
try:
session.run(
"coverage",
"combine",
env=coverage_base_env,
)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
# Generate report for tests code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_OUTPUT_DIR.joinpath("tests.xml").relative_to(REPO_ROOT)),
"--omit=salt/*",
"--include=tests/*,pkg/tests/*",
env=coverage_base_env,
)
# Generate report for salt code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_OUTPUT_DIR.joinpath("salt.xml").relative_to(REPO_ROOT)),
"--omit=tests/*,pkg/tests/*",
"--include=salt/*",
env=coverage_base_env,
)
# Generate html report for tests code coverage
session.run(
"coverage",
"html",
"-d",
str(COVERAGE_OUTPUT_DIR.joinpath("html").relative_to(REPO_ROOT)),
"--omit=salt/*",
"--include=tests/*,pkg/tests/*",
env=coverage_base_env,
)
# Generate html report for salt code coverage
session.run(
"coverage",
"html",
"-d",
str(COVERAGE_OUTPUT_DIR.joinpath("html").relative_to(REPO_ROOT)),
"--omit=tests/*,pkg/tests/*",
"--include=salt/*",
env=coverage_base_env,
)
session.run(*test_cmd, env=env)
def _report_coverage(session):
def _report_coverage(
session,
combine=True,
cli_report=True,
html_report=False,
xml_report=False,
json_report=False,
):
_install_coverage_requirement(session)
if not any([combine, cli_report, html_report, xml_report, json_report]):
session.error(
"At least one of combine, cli_report, html_report, xml_report, json_report needs to be True"
)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
@ -466,45 +422,56 @@ def _report_coverage(session):
if session.posargs:
report_section = session.posargs.pop(0)
if report_section not in ("salt", "tests"):
session.error("The report section can only be one of 'salt', 'tests'.")
session.error(
f"The report section can only be one of 'salt', 'tests', not: {report_section}"
)
if session.posargs:
session.error(
"Only one argument can be passed to the session, which is optional "
"and is one of 'salt', 'tests'."
)
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine", env=env)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
if combine is True:
coverage_db_files = glob.glob(f"{COVERAGE_FILE}.*")
if coverage_db_files:
with contextlib.suppress(CommandFailed):
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
session.run("coverage", "combine", env=env)
elif os.path.exists(COVERAGE_FILE):
session_warn(session, "Coverage files already combined.")
if not IS_WINDOWS:
# The coverage file might have come from a windows machine, fix paths
with sqlite3.connect(COVERAGE_FILE) as db:
res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'")
if res.fetchone():
session_warn(
session,
"Replacing backwards slashes with forward slashes on file "
"paths in the coverage database",
)
db.execute(r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');")
if os.path.exists(COVERAGE_FILE) and not IS_WINDOWS:
# Some coverage files might have come from a windows machine, fix paths
with sqlite3.connect(COVERAGE_FILE) as db:
res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'")
if res.fetchone():
session_warn(
session,
"Replacing backwards slashes with forward slashes on file "
"paths in the coverage database",
)
db.execute(
r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');"
)
if not os.path.exists(COVERAGE_FILE):
session.error("No coverage files found.")
if report_section == "salt":
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-salt.json"
)
json_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "salt.json"
xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "salt.xml"
html_coverage_dir = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "salt"
cmd_args = [
"--omit=tests/*,pkg/tests/*",
"--include=salt/*",
]
elif report_section == "tests":
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-tests.json"
json_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "tests.json"
xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "tests.xml"
html_coverage_dir = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "tests"
)
cmd_args = [
"--omit=salt/*",
@ -514,25 +481,58 @@ def _report_coverage(session):
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.json"
)
xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.xml"
html_coverage_dir = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "full"
cmd_args = [
"--include=salt/*,tests/*,pkg/tests/*",
]
session.run(
"coverage",
"report",
*cmd_args,
env=env,
)
if cli_report:
session.run(
"coverage",
"report",
"--precision=2",
*cmd_args,
env=env,
)
session.run(
"coverage",
"json",
"-o",
str(json_coverage_file),
*cmd_args,
env=env,
)
if html_report:
session.run(
"coverage",
"html",
"-d",
str(html_coverage_dir),
"--show-contexts",
"--precision=2",
*cmd_args,
env=env,
)
if xml_report:
try:
session.run(
"coverage",
"xml",
"-o",
str(xml_coverage_file),
*cmd_args,
env=env,
)
except CommandFailed:
session_warn(
session, "Failed to generate the source XML code coverage report"
)
if json_report:
session.run(
"coverage",
"json",
"-o",
str(json_coverage_file),
"--show-contexts",
*cmd_args,
env=env,
)
@nox.session(python=_PYTHON_VERSIONS, name="test-parametrized")
@ -1215,7 +1215,12 @@ def ci_test_onedir_tcp(session):
@nox.session(python="3", name="report-coverage")
def report_coverage(session):
_report_coverage(session)
_report_coverage(session, combine=True, cli_report=True)
@nox.session(python="3", name="coverage-report")
def coverage_report(session):
_report_coverage(session, combine=True, cli_report=True)
@nox.session(python=False, name="decompress-dependencies")
@ -1342,20 +1347,7 @@ def pre_archive_cleanup(session, pkg):
@nox.session(python="3", name="combine-coverage")
def combine_coverage(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine", env=env)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
_report_coverage(session, combine=True, cli_report=False)
@nox.session(
@ -1364,135 +1356,21 @@ def combine_coverage(session):
venv_params=["--system-site-packages"],
)
def combine_coverage_onedir(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine", env=env)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
_report_coverage(session, combine=True, cli_report=False)
@nox.session(python="3", name="create-html-coverage-report")
def create_html_coverage_report(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
report_section = None
if session.posargs:
report_section = session.posargs.pop(0)
if report_section not in ("salt", "tests"):
session.error("The report section can only be one of 'salt', 'tests'.")
if session.posargs:
session.error(
"Only one argument can be passed to the session, which is optional "
"and is one of 'salt', 'tests'."
)
if not IS_WINDOWS:
# The coverage file might have come from a windows machine, fix paths
with sqlite3.connect(COVERAGE_FILE) as db:
res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'")
if res.fetchone():
session_warn(
session,
"Replacing backwards slashes with forward slashes on file "
"paths in the coverage database",
)
db.execute(r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');")
if report_section == "salt":
report_dir = str(
COVERAGE_OUTPUT_DIR.joinpath("html", "salt").relative_to(REPO_ROOT)
)
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-salt.json"
)
cmd_args = [
"--omit=tests/*,pkg/tests/*",
"--include=salt/*",
]
elif report_section == "tests":
report_dir = str(
COVERAGE_OUTPUT_DIR.joinpath("html", "tests").relative_to(REPO_ROOT)
)
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-tests.json"
)
cmd_args = [
"--omit=salt/*",
"--include=tests/*,pkg/tests/*",
]
else:
report_dir = str(
COVERAGE_OUTPUT_DIR.joinpath("html", "full").relative_to(REPO_ROOT)
)
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.json"
)
cmd_args = [
"--include=salt/*,tests/*,pkg/tests/*",
]
# Generate html report for Salt and tests combined code coverage
session.run(
"coverage",
"html",
"-d",
report_dir,
"--show-contexts",
*cmd_args,
env=env,
)
_report_coverage(session, combine=True, cli_report=False, html_report=True)
def _create_xml_coverage_reports(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
# Generate report for tests code coverage
try:
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_OUTPUT_DIR.joinpath("tests.xml").relative_to(REPO_ROOT)),
"--omit=salt/*",
"--include=tests/*,pkg/tests/*",
env=env,
)
except CommandFailed:
session_warn(session, "Failed to generate the tests XML code coverage report")
# Generate report for salt code coverage
try:
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_OUTPUT_DIR.joinpath("salt.xml").relative_to(REPO_ROOT)),
"--omit=tests/*,pkg/tests/*",
"--include=salt/*",
env=env,
)
except CommandFailed:
session_warn(session, "Failed to generate the source XML code coverage report")
if session.posargs:
session.error("No arguments are acceptable to this nox session.")
session.posargs.append("salt")
_report_coverage(session, combine=True, cli_report=False, xml_report=True)
session.posargs.append("tests")
_report_coverage(session, combine=True, cli_report=False, xml_report=True)
@nox.session(python="3", name="create-xml-coverage-reports")
@ -1509,6 +1387,20 @@ def create_xml_coverage_reports_onedir(session):
_create_xml_coverage_reports(session)
@nox.session(python="3", name="create-json-coverage-reports")
def create_json_coverage_reports(session):
_report_coverage(session, combine=True, cli_report=False, json_report=True)
@nox.session(
python=str(ONEDIR_PYTHON_PATH),
name="create-json-coverage-reports-onedir",
venv_params=["--system-site-packages"],
)
def create_json_coverage_reports_onedir(session):
_report_coverage(session, combine=True, cli_report=False, json_report=True)
class Tee:
"""
Python class to mimic linux tee behaviour

View file

@ -49,18 +49,6 @@ def _system_up_to_date(
grains,
shell,
):
if grains["os"] == "Ubuntu" and grains["osarch"] == "amd64":
# The grub-efi-amd64-signed package seems to be a problem
# right now when updating the system
env = os.environ.copy()
env["DEBIAN_FRONTEND"] = "noninteractive"
ret = shell.run(
"apt-mark",
"hold",
"grub-efi-amd64-signed",
env=env,
)
assert ret.returncode == 0
if grains["os_family"] == "Debian":
ret = shell.run("apt", "update")
assert ret.returncode == 0

View file

@ -14,7 +14,6 @@ pytestmark = [
pytest.mark.skip_unless_on_windows,
pytest.mark.slow_test,
pytest.mark.destructive_test,
pytest.mark.skip_on_windows,
]

View file

@ -14,7 +14,6 @@ pytestmark = [
pytest.mark.skip_unless_on_windows,
pytest.mark.slow_test,
pytest.mark.destructive_test,
pytest.mark.skip_on_windows,
]

View file

@ -1143,3 +1143,162 @@ def test_verify_high_too_many_functions_declared_error_message(
res = state_obj.verify_high(high)
assert isinstance(res, list)
assert any(err_msg in x for x in res)
def test_load_modules_pkg(minion_opts):
"""
Test load_modules when using this state:
nginx:
pkg.installed:
- provider: pacmanpkg
"""
data = {
"state": "pkg",
"name": "nginx",
"__sls__": "test",
"__env__": "base",
"__id__": "nginx",
"provider": "pacmanpkg",
"order": 10000,
"fun": "installed",
}
with patch("salt.state.State._gather_pillar"):
state_obj = salt.state.State(minion_opts)
state_obj.load_modules(data)
for func in [
"pkg.available_version",
"pkg.file_list",
"pkg.group_diff",
"pkg.group_info",
]:
assert func in state_obj.functions
def test_load_modules_list(minion_opts):
"""
Test load_modules when using providers in state
as a list, with this state:
nginx:
pkg.installed:
- provider:
- cmd: cmdmod
"""
data = {
"state": "pkg",
"name": "nginx",
"__sls__": "test",
"__env__": "base",
"__id__": "nginx",
"provider": [OrderedDict([("cmd", "cmdmod")])],
"order": 10000,
"fun": "installed",
}
with patch("salt.state.State._gather_pillar"):
state_obj = salt.state.State(minion_opts)
state_obj.load_modules(data)
for func in ["cmd.exec_code", "cmd.run", "cmd.script"]:
assert func in state_obj.functions
def test_load_modules_dict(minion_opts):
"""
Test load_modules when providers is a dict, which is
not valid. Testing this state:
nginx:
pkg.installed:
- provider: {cmd: test}
"""
data = {
"state": "pkg",
"name": "nginx",
"__sls__": "test",
"__env__": "base",
"__id__": "nginx",
"provider": OrderedDict([("cmd", "test")]),
"order": 10000,
"fun": "installed",
}
mock_raw_mod = MagicMock()
patch_raw_mod = patch("salt.loader.raw_mod", mock_raw_mod)
with patch("salt.state.State._gather_pillar"):
with patch_raw_mod:
state_obj = salt.state.State(minion_opts)
state_obj.load_modules(data)
mock_raw_mod.assert_not_called()
def test_check_refresh_grains(minion_opts):
"""
Test check_refresh when using this state:
grains_refresh:
module.run:
- name: saltutil.refresh_grains
- reload_grains: true
Ensure that the grains are loaded when reload_grains
is set.
"""
data = {
"state": "module",
"name": "saltutil.refresh_grains",
"__sls__": "test",
"__env__": "base",
"__id__": "grains_refresh",
"reload_grains": True,
"order": 10000,
"fun": "run",
}
ret = {
"name": "saltutil.refresh_grains",
"changes": {"ret": True},
"comment": "Module function saltutil.refresh_grains executed",
"result": True,
"__sls__": "test",
"__run_num__": 0,
}
mock_refresh = MagicMock()
patch_refresh = patch("salt.state.State.module_refresh", mock_refresh)
with patch("salt.state.State._gather_pillar"):
with patch_refresh:
state_obj = salt.state.State(minion_opts)
state_obj.check_refresh(data, ret)
mock_refresh.assert_called_once()
assert "cwd" in state_obj.opts["grains"]
def test_check_refresh_pillar(minion_opts, caplog):
"""
Test check_refresh when using this state:
pillar_refresh:
module.run:
- name: saltutil.refresh_pillar
- reload_pillar: true
Ensure the pillar is refreshed.
"""
data = {
"state": "module",
"name": "saltutil.refresh_pillar",
"__sls__": "test",
"__env__": "base",
"__id__": "pillar_refresh",
"reload_pillar": True,
"order": 10000,
"fun": "run",
}
ret = {
"name": "saltutil.refresh_pillar",
"changes": {"ret": False},
"comment": "Module function saltutil.refresh_pillar executed",
"result": False,
"__sls__": "test",
"__run_num__": 0,
}
mock_refresh = MagicMock()
patch_refresh = patch("salt.state.State.module_refresh", mock_refresh)
mock_pillar = MagicMock()
patch_pillar = patch("salt.state.State._gather_pillar", mock_pillar)
with patch_pillar, patch_refresh:
with caplog.at_level(logging.DEBUG):
state_obj = salt.state.State(minion_opts)
state_obj.check_refresh(data, ret)
mock_refresh.assert_called_once()
assert "Refreshing pillar..." in caplog.text

View file

@ -8,11 +8,13 @@
import os
import string
import tempfile
import pytest
import salt.utils.cloud as cloud
from salt.exceptions import SaltCloudException
from salt.utils.cloud import __ssh_gateway_arguments as ssh_gateway_arguments
from tests.support.mock import MagicMock, patch
@ -74,7 +76,7 @@ def create_class(tmp_path):
os.chdir(old_cwd)
def test_ssh_password_regex(create_class):
def test_ssh_password_regex():
"""Test matching ssh password patterns"""
for pattern in (
"Password for root@127.0.0.1:",
@ -125,7 +127,7 @@ def test_retrieve_password_from_keyring(create_class):
assert pw_in_keyring == "fake_password_c8231"
def test_sftp_file_with_content_under_python3(create_class):
def test_sftp_file_with_content_under_python3():
with pytest.raises(Exception) as context:
cloud.sftp_file("/tmp/test", "ТЕSТ test content")
# we successful pass the place with os.write(tmpfd, ...
@ -133,7 +135,7 @@ def test_sftp_file_with_content_under_python3(create_class):
@pytest.mark.skip_on_windows(reason="Not applicable for Windows.")
def test_check_key_path_and_mode(create_class):
def test_check_key_path_and_mode():
with tempfile.NamedTemporaryFile() as f:
key_file = f.name
@ -657,3 +659,116 @@ def test_deploy_windows_master(master, expected):
expected_args = "/S /master={} /minion-name=None".format(expected)
assert mock.call_args_list[0].args[0] == expected_cmd
assert mock.call_args_list[0].args[1] == expected_args
def test___ssh_gateway_config_dict():
assert cloud.__ssh_gateway_config_dict(None) == {}
gate = {
"ssh_gateway": "Gozar",
"ssh_gateway_key": "Zuul",
"ssh_gateway_user": "Vinz Clortho",
"ssh_gateway_command": "Are you the keymaster?",
}
assert cloud.__ssh_gateway_config_dict(gate) == gate
def test_ip_to_int():
assert cloud.ip_to_int("127.0.0.1") == 2130706433
def test_is_public_ip():
assert cloud.is_public_ip("8.8.8.8") is True
assert cloud.is_public_ip("127.0.0.1") is False
assert cloud.is_public_ip("172.17.3.1") is False
assert cloud.is_public_ip("192.168.30.4") is False
assert cloud.is_public_ip("10.145.1.1") is False
assert cloud.is_public_ip("fe80::123:ffff:ffff:ffff") is False
assert cloud.is_public_ip("2001:db8:3333:4444:CCCC:DDDD:EEEE:FFFF") is True
def test_check_name():
try:
cloud.check_name("test", string.ascii_letters)
except SaltCloudException as exc:
assert False, f"cloud.check_name rasied SaltCloudException: {exc}"
with pytest.raises(SaltCloudException):
cloud.check_name("test", string.digits)
def test__strip_cache_events():
events = {
"test": "foobar",
"passwd": "fakepass",
}
events2 = {"test1": "foobar", "test2": "foobar"}
opts = {"cache_event_strip_fields": ["passwd"]}
assert cloud._strip_cache_events(events, opts) == {"test": "foobar"}
assert cloud._strip_cache_events(events2, opts) == events2
def test_salt_cloud_force_asciii():
try:
"\u0411".encode("iso-8859-15")
except UnicodeEncodeError as exc:
with pytest.raises(UnicodeEncodeError):
cloud._salt_cloud_force_ascii(exc)
with pytest.raises(TypeError):
cloud._salt_cloud_force_ascii("not the thing")
try:
"\xa0\u2013".encode("iso-8859-15")
except UnicodeEncodeError as exc:
assert cloud._salt_cloud_force_ascii(exc) == ("-", 2)
def test__unwrap_dict():
assert cloud._unwrap_dict({"a": {"b": {"c": "foobar"}}}, "a,b,c") == "foobar"
def test_get_salt_interface():
with patch(
"salt.config.get_cloud_config_value",
MagicMock(side_effect=[False, "public_ips"]),
) as cloud_config:
assert cloud.get_salt_interface({}, {}) == "public_ips"
assert cloud_config.call_count == 2
with patch(
"salt.config.get_cloud_config_value", MagicMock(return_value="private_ips")
) as cloud_config:
assert cloud.get_salt_interface({}, {}) == "private_ips"
assert cloud_config.call_count == 1
def test_userdata_template():
assert cloud.userdata_template(opts=None, vm_=None, userdata=None) is None
with patch("salt.config.get_cloud_config_value", MagicMock(return_value=False)):
assert cloud.userdata_template(opts=None, vm_=None, userdata="test") == "test"
with patch("salt.config.get_cloud_config_value", MagicMock(return_value=None)):
opts = {"userdata_template": None}
assert cloud.userdata_template(opts=opts, vm_=None, userdata="test") == "test"
renders = {"jinja": MagicMock(return_value="test")}
with patch("salt.config.get_cloud_config_value", MagicMock(return_value="jinja")):
with patch("salt.loader.render", MagicMock(return_value=renders)):
opts = {
"userdata_template": "test",
"renderer_blacklist": None,
"renderer_whitelist": None,
"renderer": "jinja",
}
assert cloud.userdata_template(opts=opts, vm_={}, userdata="test") == "test"
renders = {"jinja": MagicMock(return_value=True)}
with patch("salt.config.get_cloud_config_value", MagicMock(return_value="jinja")):
with patch("salt.loader.render", MagicMock(return_value=renders)):
opts = {
"userdata_template": "test",
"renderer_blacklist": None,
"renderer_whitelist": None,
"renderer": "jinja",
}
assert cloud.userdata_template(opts=opts, vm_={}, userdata="test") == "True"

View file

@ -622,10 +622,10 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False):
"""
_matrix = []
_splits = {
"functional": 4,
"integration": 6,
"functional": 5,
"integration": 7,
"scenarios": 2,
"unit": 3,
"unit": 4,
}
for transport in ("zeromq", "tcp"):
if transport == "tcp":
@ -769,8 +769,6 @@ def pkg_matrix(
"version": version,
}
)
if distro_slug.startswith("windows"):
_matrix[-1]["pkg-type"] = pkg_type.upper()
ctx.info("Generated matrix:")
ctx.print(_matrix, soft_wrap=True)

View file

@ -1448,15 +1448,13 @@ class VM:
"""
Combine the code coverage databases
"""
return self.run_nox("combine-coverage-onedir", session_args=[self.name])
return self.run_nox("combine-coverage-onedir")
def create_xml_coverage_reports(self):
"""
Create XML coverage reports
"""
return self.run_nox(
"create-xml-coverage-reports-onedir", session_args=[self.name]
)
return self.run_nox("create-xml-coverage-reports-onedir")
def compress_dependencies(self):
"""