Merge pull request #65299 from s0undt3ch/hotfix/merge-forward

[master] Merge 3006.x into master
This commit is contained in:
Pedro Algarvio 2023-10-11 15:13:59 +01:00 committed by GitHub
commit c18760559a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
130 changed files with 9760 additions and 5686 deletions

View file

@ -1,112 +1,72 @@
codecov:
ci:
- jenkins.saltproject.io
- github.com
max_report_age: 24 # The age you want coverage reports to expire at, or if you
# want to disable this check. Expired reports will not be processed by codecov.
require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
max_report_age: 72 # The age, in hours, you want coverage reports to expire at, or if you
# want to disable this check. Expired reports will not be processed by codecov.
require_ci_to_pass: false
# notify:
# after_n_builds: 25 # Only notify after N builds
# # This value is the output of:
# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
# wait_for_ci: yes # Should Codecov wait for all CI statuses to complete before sending ours.
# # Note: Codecov considers all non-codecov statuses to be CI statuses
notify:
wait_for_ci: true # Should Codecov wait for all CI statuses to complete before sending ours.
# Note: Codecov considers all non-codecov statuses to be CI statuses
# manual_trigger: true # We manually tell codecov to merge and process all uploaded coverage reports
ignore:
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
- salt/ext/.* # ignore any code under salt/ext
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
- salt/ext/.* # ignore any code under salt/ext
coverage:
round: up
range: 70..100
precision: 2
notify: {}
status: false
status:
project:
default:
target: auto # auto compares coverage to the previous base commit
threshold: 5% # adjust accordingly based on how flaky your tests are
# this allows a 5% drop from the previous base commit coverage
flags:
- salt
- tests
# status:
# project: # measuring the overall project coverage
# default:
# informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# # resulting status will pass no matter what the coverage is or what other settings
# # are specified. Informational mode is great to use if you want to expose codecov
# # information to other developers in your pull request without necessarily gating
# # PRs on that information.
# target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
# flags:
# - salt
# - tests
# salt: # declare a new status context "salt"
# informational: true
# paths: "!tests/" # remove all files in "tests/"
# target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# # options: success, error, failure
# if_not_found: success # if parent is not found report status as success, error, or failure
# if_ci_failed: error # if ci fails report status as success, error, or failure
# flags:
# - salt
# tests: # declare a new status context "tests"
# informational: true
# #target: 100% # we always want 100% coverage here
# target: auto # auto while we get this going
# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
# paths: "!salt/" # only include coverage in "tests/" folder
# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# # options: success, error, failure
# if_not_found: success # if parent is not found report status as success, error, or failure
# if_ci_failed: error # if ci fails report status as success, error, or failure
# flags:
# - tests
salt: # declare a new status context "salt"
paths:
- "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
flags:
- salt
# patch: # pull requests only: this commit status will measure the
# # entire pull request's Coverage Diff. Checking if the lines
# # adjusted are covered at least X%.
# default:
# informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# # resulting status will pass no matter what the coverage is or what other settings
# # are specified. Informational mode is great to use if you want to expose codecov
# # information to other developers in your pull request without necessarily gating
# # PRs on that information.
# target: 100% # Newly added lines must have 100% coverage
# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# # options: success, error, failure
# if_not_found: success
# if_ci_failed: error
# flags:
# - salt
# - tests
tests: # declare a new status context "tests"
paths:
- "!salt/" # only include coverage in "tests/" folder
target: auto # auto while we get this going
flags:
- tests
# changes: # if there are any unexpected changes in coverage
# default:
# informational: true # Use Codecov in informational mode. Default is false. If true is specified the
# # resulting status will pass no matter what the coverage is or what other settings
# # are specified. Informational mode is great to use if you want to expose codecov
# # information to other developers in your pull request without necessarily gating
# # PRs on that information.
# if_no_uploads: error
# if_not_found: success
# if_ci_failed: error
# flags:
# - salt
# - tests
patch: # pull requests only: this commit status will measure the
# entire pull request's Coverage Diff. Checking if the lines
# adjusted are covered at least X%.
default:
target: auto # auto compares coverage to the previous base commit
threshold: 5% # adjust accordingly based on how flaky your tests are
# this allows a 5% drop from the previous base commit coverage
flags:
salt:
paths:
- salt/
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
carryforward: true # https://docs.codecov.io/docs/carryforward-flags
tests:
paths:
- tests/
carryforward: true
pkg:
paths:
- pkg/tests
carryforward: true
unit:
paths:
- tests/unit
@ -125,23 +85,10 @@ flags:
- tests/integration
- tests/pytests/integration
carryforward: true
system:
paths:
- tests/integration
- tests/pytests/integration
carryforward: true
#comment:
# layout: "reach, diff, flags, files"
# after_n_builds: 46 # Only comment on PRs after N builds
# # This value is the output of:
# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
#
# behavior: new # Comment posting behaviour
# # default: update, if exists. Otherwise post new.
# # once: update, if exists. Otherwise post new. Skip if deleted.
# # new: delete old and post new.
# # spammy: post new (do not delete old comments).
#
# Disable Comments
comment: off
comment:
layout: "reach, diff, flags, files"
behavior: default # Comment posting behaviour
# default: update, if exists. Otherwise post new.
# once: update, if exists. Otherwise post new. Skip if deleted.
# new: delete old and post new.

View file

@ -7,6 +7,13 @@ relative_files = True
omit =
setup.py
.nox/*
source_pkgs =
pkg.tests
salt
tests
tools
disable_warnings = module-not-imported
[report]
# Regexes for lines to exclude from consideration
@ -32,7 +39,7 @@ ignore_errors = True
[paths]
salt =
salt/
artifacts/salt
artifacts/salt/lib/python3.*/site-packages/salt
**/testing/salt/
**\testing\salt
tests =

View file

@ -43,13 +43,15 @@ runs:
with:
path: artifacts/${{ inputs.package-name }}
key: >
${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|
${{ hashFiles(
format('{0}/.relenv/**/*.xz', github.workspace),
'requirements/static/pkg/*/*.txt',
'.github/actions/build-onedir-deps/action.yml',
'cicd/shared-gh-workflows-context.yml'
) }}
${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{
hashFiles(
format('{0}/.relenv/**/*.xz', github.workspace),
'requirements/static/pkg/*/*.txt',
'.github/actions/build-onedir-deps/action.yml',
'.github/workflows/build-deps-onedir-*.yml',
'cicd/shared-gh-workflows-context.yml'
)
}}
- name: Install Salt Onedir Package Dependencies
shell: bash

View file

@ -47,13 +47,15 @@ runs:
with:
path: artifacts/${{ inputs.package-name }}
key: >
${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|
${{ hashFiles(
format('{0}/.relenv/**/*.xz', github.workspace),
'requirements/static/pkg/*/*.txt',
'.github/actions/build-onedir-deps/action.yml',
'cicd/shared-gh-workflows-context.yml'
) }}
${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{
hashFiles(
format('{0}/.relenv/**/*.xz', github.workspace),
'requirements/static/pkg/*/*.txt',
'.github/actions/build-onedir-deps/action.yml',
'.github/workflows/build-deps-onedir-*.yml',
'cicd/shared-gh-workflows-context.yml'
)
}}
- name: Download Source Tarball
uses: actions/download-artifact@v3

View file

@ -1,5 +1,5 @@
---
name: Build Debian Packages
name: Build DEB Packages
on:
workflow_call:
@ -16,6 +16,17 @@ on:
required: true
type: string
description: The version of python to use with relenv
source:
required: true
type: string
description: The backend to build the packages with
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build:
@ -31,19 +42,18 @@ jobs:
- x86_64
- aarch64
source:
- onedir
- src
- ${{ inputs.source }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
steps:
# Checkout here so we can easily use custom actions
- uses: actions/checkout@v3
- uses: actions/checkout@v4
# Checkout here for the build process
- name: Checkout in build directory
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path:
pkgs/checkout/
@ -89,7 +99,7 @@ jobs:
working-directory: pkgs/checkout/
run: |
tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
matrix.source == 'onedir' &&
inputs.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
@ -102,7 +112,7 @@ jobs:
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ matrix.source }}" != "src" ]; then
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"

View file

@ -0,0 +1,135 @@
---
name: Install Test Dependencies
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
nox-session:
required: true
type: string
description: The nox session to run
salt-version:
type: string
required: true
description: The Salt version to set prior to running tests.
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
env:
COLUMNS: 190
PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/"
PIP_EXTRA_INDEX_URL: "https://pypi.org/simple"
GITHUB_ACTIONS_PIPELINE: "1"
jobs:
dependencies:
name: Install
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 90
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
- name: Install System Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
brew install openssl@3
- name: Install Nox
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
run: |
export PYCURL_SSL_LIBRARY=openssl
export LDFLAGS="-L/usr/local/opt/openssl@3/lib"
export CPPFLAGS="-I/usr/local/opt/openssl@3/include"
export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig"
nox --install-only -e ${{ inputs.nox-session }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox -e "pre-archive-cleanup(pkg=False)"
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: nox.${{ inputs.distro-slug }}.tar.*

View file

@ -0,0 +1,158 @@
---
name: Install Test Dependencies
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
nox-session:
required: true
type: string
description: The nox session to run
salt-version:
type: string
required: true
description: The Salt version to set prior to running tests.
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
dependencies:
name: Install
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 90
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: PyPi Proxy
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: nox.${{ inputs.distro-slug }}.tar.*

View file

@ -0,0 +1,81 @@
---
name: Build Packaging Dependencies Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build-deps-linux:
name: Linux
if: ${{ inputs.self-hosted-runners }}
strategy:
fail-fast: false
matrix:
arch:
- x86_64
- aarch64
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: linux
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: linux
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -0,0 +1,83 @@
---
name: Build Packaging Dependencies Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build-deps-macos:
name: macOS
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86_64
runs-on: macos-12
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: darwin
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: darwin
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -0,0 +1,84 @@
---
name: Build Packaging Dependencies Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build-deps-windows:
name: Windows
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86
- amd64
runs-on: windows-latest
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: windows
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -1,150 +0,0 @@
---
name: Build Packaging Dependencies Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
jobs:
build-deps-linux:
name: Linux
if: ${{ inputs.self-hosted-runners }}
strategy:
fail-fast: false
matrix:
arch:
- x86_64
- aarch64
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
steps:
- uses: actions/checkout@v3
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: linux
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: linux
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}
build-deps-windows:
name: Windows
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86
- amd64
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: windows
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}
build-deps-macos:
name: macOS
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86_64
runs-on: macos-12
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: darwin
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: darwin
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -13,6 +13,13 @@ on:
type: string
description: Seed used to invalidate caches
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build:
name: Build
@ -32,7 +39,7 @@ jobs:
# - pdf
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}

View file

@ -24,6 +24,15 @@ on:
type: string
description: The GitHub Environment where this workflow should run
default: ci
source:
required: true
type: string
description: The backend to build the packages with
env:
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
@ -36,8 +45,7 @@ jobs:
arch:
- x86_64
source:
- onedir
- src
- ${{ inputs.source }}
runs-on:
- macos-12
@ -66,7 +74,7 @@ jobs:
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
fi
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 3.9
@ -114,7 +122,7 @@ jobs:
APP_SPEC_PWD: "${{ secrets.MAC_SIGN_APP_SPEC_PWD }}"
run: |
tools pkg build macos --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
matrix.source == 'onedir' &&
inputs.source == 'onedir' &&
format(
'--onedir salt-{0}-onedir-darwin-{1}.tar.xz --salt-version {0} {2}',
inputs.salt-version,
@ -128,7 +136,7 @@ jobs:
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ matrix.source }}" != "src" ]; then
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos-from-src" >> "$GITHUB_OUTPUT"

View file

@ -16,9 +16,17 @@ on:
required: true
type: string
description: The version of python to use with relenv
source:
required: true
type: string
description: The backend to build the packages with
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build:
@ -34,14 +42,13 @@ jobs:
- x86_64
- aarch64
source:
- onedir
- src
- ${{ inputs.source }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
@ -77,7 +84,7 @@ jobs:
- name: Build RPM
run: |
tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
matrix.source == 'onedir' &&
inputs.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
@ -86,7 +93,7 @@ jobs:
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ matrix.source }}" != "src" ]; then
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"

View file

@ -0,0 +1,88 @@
---
name: Build Salt Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build-salt-linux:
name: Linux
if: ${{ inputs.self-hosted-runners }}
strategy:
fail-fast: false
matrix:
arch:
- x86_64
- aarch64
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Setup Relenv
uses: ./.github/actions/setup-relenv
id: setup-relenv
with:
platform: linux
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
platform: linux
arch: ${{ matrix.arch }}
salt-version: "${{ inputs.salt-version }}"
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -0,0 +1,89 @@
---
name: Build Salt Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build-salt-macos:
name: macOS
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86_64
runs-on: macos-12
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: darwin
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
platform: darwin
arch: ${{ matrix.arch }}
salt-version: "${{ inputs.salt-version }}"
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -0,0 +1,91 @@
---
name: Build Salt Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
build-salt-windows:
name: Windows
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86
- amd64
runs-on: windows-latest
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
platform: windows
arch: ${{ matrix.arch }}
salt-version: "${{ inputs.salt-version }}"
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -1,170 +0,0 @@
---
name: Build Salt Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
jobs:
build-salt-linux:
name: Linux
if: ${{ inputs.self-hosted-runners }}
strategy:
fail-fast: false
matrix:
arch:
- x86_64
- aarch64
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Setup Relenv
uses: ./.github/actions/setup-relenv
id: setup-relenv
with:
platform: linux
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
platform: linux
arch: ${{ matrix.arch }}
salt-version: "${{ inputs.salt-version }}"
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}
build-salt-windows:
name: Windows
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86
- amd64
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
platform: windows
arch: ${{ matrix.arch }}
salt-version: "${{ inputs.salt-version }}"
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}
build-salt-macos:
name: macOS
if: ${{ inputs.github-hosted-runners }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
arch:
- x86_64
runs-on: macos-12
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: darwin
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
platform: darwin
arch: ${{ matrix.arch }}
salt-version: "${{ inputs.salt-version }}"
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -24,6 +24,17 @@ on:
type: string
description: The GitHub Environment where this workflow should run
default: ci
source:
required: true
type: string
description: The backend to build the packages with
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
@ -38,8 +49,7 @@ jobs:
- x86
- amd64
source:
- onedir
- src
- ${{ inputs.source }}
runs-on:
- windows-latest
@ -75,7 +85,7 @@ jobs:
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
fi
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 3.9
@ -108,7 +118,7 @@ jobs:
- name: Build Windows Packages
run: |
tools pkg build windows --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
matrix.source == 'onedir' &&
inputs.source == 'onedir' &&
format(
'--onedir salt-{0}-onedir-windows-{1}.zip --salt-version {0} --arch {1} {2}',
inputs.salt-version,
@ -123,7 +133,7 @@ jobs:
id: set-artifact-name
shell: bash
run: |
if [ "${{ matrix.source }}" != "src" ]; then
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name-nsis=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS" >> "$GITHUB_OUTPUT"
echo "artifact-name-msi=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI" >> "$GITHUB_OUTPUT"
else

1233
.github/workflows/ci.yml vendored

File diff suppressed because it is too large Load diff

View file

@ -34,7 +34,7 @@ jobs:
run: |
git config --global --add safe.directory "$(pwd)"
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Nox
run: |
@ -79,7 +79,7 @@ jobs:
run: |
git config --global --add safe.directory "$(pwd)"
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install Nox
run: |

File diff suppressed because it is too large Load diff

View file

@ -37,7 +37,7 @@ jobs:
run: |
git config --global --add safe.directory "$(pwd)"
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: ./.github/actions/setup-actionlint
with:
cache-seed: ${{ inputs.cache-seed }}

View file

@ -41,7 +41,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
repository: ${{ github.event.inputs.saltRepo }}
ref: ${{ github.event.inputs.saltBranch }}
@ -58,7 +58,7 @@ jobs:
with:
python-version: 3.8
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install pypa/build
run: |
python -m pip install build --user

View file

@ -23,12 +23,12 @@ jobs:
steps:
- name: Checkout Salt
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: salt
- name: Checkout WinRepo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: winrepo
repository: saltstack/salt-winrepo-ng

View file

@ -16,6 +16,13 @@ on:
permissions:
contents: read
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
upload-virustotal:
name: Upload VirusTotal
@ -27,7 +34,7 @@ jobs:
steps:
- name: Checkout Salt
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Set Up Python 3.10
uses: actions/setup-python@v4

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,86 @@
<%- for slug, display_name, arch in build_ci_deps_listing["windows"] %>
<{ slug.replace(".", "") }>-ci-deps:
<%- do test_salt_needs.append(slug.replace(".", "") + "-ci-deps") %>
name: <{ display_name }> Deps
<%- if workflow_slug != 'release' %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- endif %>
needs:
- prepare-workflow
<%- if workflow_slug != 'release' %>
- build-salt-onedir-windows
<%- else %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: <{ slug }>
nox-session: ci-test-onedir
platform: windows
arch: amd64
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
<%- endfor %>
<%- for slug, display_name, arch in build_ci_deps_listing["macos"] %>
<{ slug.replace(".", "") }>-ci-deps:
<%- do test_salt_needs.append(slug.replace(".", "") + "-ci-deps") %>
name: <{ display_name }> Deps
<%- if workflow_slug != 'release' %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- endif %>
needs:
- prepare-workflow
<%- if workflow_slug != 'release' %>
- build-salt-onedir-macos
<%- else %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/build-deps-ci-action-macos.yml
with:
distro-slug: <{ slug }>
nox-session: ci-test-onedir
platform: darwin
arch: x86_64
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
<%- endfor %>
<%- for slug, display_name, arch in build_ci_deps_listing["linux"] %>
<{ slug.replace(".", "") }>-ci-deps:
<%- do test_salt_needs.append(slug.replace(".", "") + "-ci-deps") %>
name: <{ display_name }> Deps
<%- if workflow_slug != 'release' %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- endif %>
needs:
- prepare-workflow
<%- if workflow_slug != 'release' %>
- build-salt-onedir-linux
<%- else %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/build-deps-ci-action.yml
with:
distro-slug: <{ slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ arch }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
<%- endfor %>

View file

@ -8,18 +8,21 @@
("debian", "10", "aarch64"),
("debian", "11", "x86_64"),
("debian", "11", "aarch64"),
("debian", "12", "x86_64"),
("debian", "12", "aarch64"),
("ubuntu", "20.04", "x86_64"),
("ubuntu", "20.04", "aarch64"),
("ubuntu", "22.04", "x86_64"),
("ubuntu", "22.04", "aarch64"),
) %>
- distro: <{ distro }>
- pkg-type: deb
distro: <{ distro }>
version: "<{ version }>"
arch: <{ arch }>
<%- endfor %>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download System Dependencies
run: |

View file

@ -1,6 +1,12 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- macos
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -1,6 +1,12 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- onedir
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -1,23 +1,28 @@
<%- for pkg_type, display_name, runner_type in (
("rpm", "RPM", "self-hosted"),
("deb", "DEB", "self-hosted"),
("windows", "Windows", "github-hosted"),
("macos", "macOS", "github-hosted"),
) %>
<%- for platform, pkg_type, display_name, runner_type in (
("linux", "rpm", "RPM", "self-hosted"),
("linux", "deb", "DEB", "self-hosted"),
("windows", "windows", "Windows", "github-hosted"),
("macos", "macos", "macOS", "github-hosted"),
) %>
<%- set job_name = "build-{}-pkgs".format(pkg_type) %>
<%- for backend in ("onedir", "src") %>
<%- set job_name = "build-{}-pkgs-{}".format(pkg_type, backend) %>
<%- if backend == "src" %>
<%- do conclusion_needs.append(job_name) %>
<%- endif %>
<{ job_name }>:
name: Build <{ display_name }> Packages
name: Build Packages
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['<{ runner_type }>'] }}
needs:
- prepare-workflow
- build-salt-onedir
- build-salt-onedir-<{ platform }>
uses: ./.github/workflows/build-<{ pkg_type }>-packages.yml
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>"
source: "<{ backend }>"
<%- if pkg_type in ("macos", "windows") and gh_environment %>
environment: <{ gh_environment }>
<%- if pkg_type == "macos" %>
@ -28,4 +33,5 @@
secrets: inherit
<%- endif %>
<%- endfor %>
<%- endfor %>

View file

@ -1,17 +1,17 @@
<%- for type, display_name, needs_pkg in (
("src", "Source", False),
("deb", "DEB", True),
("rpm", "RPM", True),
("windows", "Windows", True),
("macos", "macOS", True),
("onedir", "Onedir", False),
) %>
<%- for type, display_name in (
("src", "Source"),
("deb", "DEB"),
("rpm", "RPM"),
("windows", "Windows"),
("macos", "macOS"),
("onedir", "Onedir"),
) %>
<%- set job_name = "build-{}-repo".format(type) %>
<%- do build_repo_needs.append(job_name) %>
<{ job_name }>:
name: Build <{ display_name }> Repository
name: Build Repository
environment: <{ gh_environment }>
runs-on:
- self-hosted
@ -19,11 +19,16 @@
- repo-<{ gh_environment }>
needs:
- prepare-workflow
<%- if needs_pkg %>
- build-<{ type }>-pkgs
<%- else %>
- build-salt-onedir
<%- if type not in ("src", "onedir") %>
- build-<{ type }>-pkgs-onedir
<%- elif type == 'onedir' %>
- build-salt-onedir-linux
- build-salt-onedir-macos
- build-salt-onedir-windows
<%- elif type == 'src' %>
- build-source-tarball
<%- endif %>
<%- include "build-{}-repo.yml.jinja".format(type) %>
<%- endfor %>

View file

@ -23,13 +23,14 @@
("photon", "4", "x86_64"),
("photon", "4", "aarch64"),
) %>
- distro: <{ distro }>
- pkg-type: rpm
distro: <{ distro }>
version: "<{ version }>"
arch: <{ arch }>
<%- endfor %>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Download System Dependencies
run: |

View file

@ -1,6 +1,12 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- src
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -1,6 +1,12 @@
strategy:
fail-fast: false
matrix:
pkg-type:
- windows
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -57,7 +57,7 @@
needs:
- prepare-workflow
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Get Python Version
id: get-python-version
@ -220,7 +220,7 @@
- prepare-release
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v4
@ -259,14 +259,16 @@
<%- set job_name = "build-deps-onedir" %>
<%- if includes.get(job_name, True) %>
<%- for platform in ("linux", "windows", "macos") %>
<%- set platform_job_name = "{}-{}".format(job_name, platform) %>
<{ job_name }>:
<%- do conclusion_needs.append(job_name) %>
<{ platform_job_name }>:
<%- do conclusion_needs.append(platform_job_name) %>
name: Build Dependencies Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
uses: ./.github/workflows/build-deps-onedir-<{ platform }>.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
@ -275,21 +277,24 @@
relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>"
<%- endfor %>
<%- endif %>
<%- set job_name = "build-salt-onedir" %>
<%- if includes.get(job_name, True) %>
<%- for platform in ("linux", "windows", "macos") %>
<%- set platform_job_name = "{}-{}".format(job_name, platform) %>
<{ job_name }>:
<%- do conclusion_needs.append(job_name) %>
<{ platform_job_name }>:
<%- do conclusion_needs.append(platform_job_name) %>
name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] }}
needs:
- prepare-workflow
- build-deps-onedir
- build-deps-onedir-<{ platform }>
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
uses: ./.github/workflows/build-salt-onedir-<{ platform }>.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
@ -298,6 +303,7 @@
relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>"
<%- endfor %>
<%- endif %>
@ -306,15 +312,18 @@
<%- include "build-packages.yml.jinja" %>
<%- endif %>
<%- set pkg_tests_job_name = "pkg-tests" %>
<%- set salt_tests_job_name = "salt-tests" %>
<%- if includes.get(pkg_tests_job_name, True) or includes.get(salt_tests_job_name, True) %>
<%- include "build-ci-deps.yml.jinja" %>
<%- endif %>
<%- set job_name = "pkg-tests" %>
<%- if includes.get(job_name, True) %>
<%- if includes.get(pkg_tests_job_name, True) %>
<%- include "test-salt-pkg.yml.jinja" %>
<%- endif %>
<%- set job_name = "salt-tests" %>
<%- if includes.get(job_name, True) %>
<%- if includes.get(salt_tests_job_name, True) %>
<%- include "test-salt.yml.jinja" %>
<%- endif %>
@ -331,7 +340,7 @@
- <{ need }>
<%- endfor %>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Set up Python 3.10
if: ${{ github.event.repository.private == false }}
@ -343,6 +352,19 @@
run: |
python3 -m pip install 'nox==<{ nox_version }>'
{# We can't yet use tokenless uploads with the codecov CLI
- name: Install Codecov CLI
run: |
python3 -m pip install codecov-cli
- name: Tell Codecov To Process Reports
run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
send-notifications --git-service github --sha ${{ github.sha }}
#}
- name: Get coverage reports
id: get-coverage-reports
uses: actions/download-artifact@v3
@ -357,15 +379,51 @@
run: |
nox --force-color -e combine-coverage
- name: Create Code Coverage HTML Report
- name: Report Salt Code Coverage
run: |
nox --force-color -e coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
- name: Create Salt Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report -- salt
- name: Upload Salt Code Coverage HTML Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-salt-html-report
path: artifacts/coverage/html/salt
retention-days: 7
if-no-files-found: error
- name: Report Combined Code Coverage
run: |
nox --force-color -e coverage-report
- name: Create Combined Code Coverage JSON Report
run: |
nox --force-color -e create-json-coverage-reports
- name: Upload Combined Code Coverage JSON Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-full-json-report
path: artifacts/coverage/coverage.json
retention-days: 7
if-no-files-found: error
- name: Create Combined Code Coverage HTML Report
run: |
nox --force-color -e create-html-coverage-report
- name: Upload Code Coverage HTML Report
- name: Upload Combined Code Coverage HTML Report
uses: actions/upload-artifact@v3
with:
name: code-coverage-html-report
path: artifacts/coverage/html
name: code-coverage-full-html-report
path: artifacts/coverage/html/full
retention-days: 7
if-no-files-found: error
<%- endif %>

View file

@ -9,6 +9,7 @@
<%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %>
<%- set gpg_key_id = "64CBBC8173D76B3F" %>
<%- set prepare_actual_release = prepare_actual_release | default(False) %>
<%- set gh_actions_workflows_python_version = "3.10" %>
---
<%- block name %>
name: <{ workflow_name }>
@ -97,7 +98,7 @@ jobs:
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Full clone to also get the tags to get the right salt version
@ -308,6 +309,27 @@ jobs:
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
{# We can't yet use tokenless uploads with the codecov CLI
- name: Install Codecov CLI
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
run: |
python3 -m pip install codecov-cli
- name: Save Commit Metadata In Codecov
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
create-commit --git-service github --sha ${{ github.sha }}
- name: Create Codecov Coverage Report
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
create-report --git-service github --sha ${{ github.sha }}
#}
<%- endblock prepare_workflow_job %>
<%- endif %>

View file

@ -157,7 +157,7 @@ concurrency:
<%- endif %>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Get Salt Project GitHub Actions Bot Environment
run: |

View file

@ -87,7 +87,7 @@ permissions:
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Full clone to also get the tags to get the right salt version
@ -140,7 +140,7 @@ permissions:
- name: Set Cache Seed Output
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"
tools ci define-cache-seed ${{ env.CACHE_SEED }}
<%- endblock prepare_workflow_job %>
<%- endif %>
@ -173,7 +173,7 @@ permissions:
- platform: darwin
arch: x86_64
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -196,6 +196,8 @@ permissions:
retention-days: 7
if-no-files-found: error
<%- include "build-ci-deps.yml.jinja" %>
backup:
name: Backup
runs-on:
@ -210,7 +212,7 @@ permissions:
steps:
- name: Clone The Salt Repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Rclone
uses: AnimMouse/setup-rclone@v1
@ -240,7 +242,7 @@ permissions:
steps:
- name: Clone The Salt Repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Get Salt Project GitHub Actions Bot Environment
run: |
@ -259,7 +261,7 @@ permissions:
tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-pkg-repo-downloads.yml.jinja" %>
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
release:
@ -280,7 +282,7 @@ permissions:
environment: <{ gh_environment }>
steps:
- name: Clone The Salt Repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
@ -391,7 +393,7 @@ permissions:
environment: <{ gh_environment }>
steps:
- name: Clone The Salt Repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
@ -422,7 +424,7 @@ permissions:
- linux
- repo-<{ gh_environment }>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -94,7 +94,7 @@ concurrency:
- linux
- repo-<{ gh_environment }>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Get Salt Project GitHub Actions Bot Environment
run: |
@ -158,7 +158,7 @@ concurrency:
if-no-files-found: error
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-pkg-repo-downloads.yml.jinja" %>
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
publish-pypi:
@ -183,7 +183,7 @@ concurrency:
- linux
- repo-<{ gh_environment }>
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -0,0 +1,737 @@
name: Test Download Packages
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version of the packages to install and test
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
environment:
required: true
type: string
description: The environment to run tests against
latest-release:
required: true
type: string
description: The latest salt release
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
nox-session:
required: false
type: string
description: The nox session to run
default: ci-test-onedir
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
linux:
name: Linux
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["linux"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- endfor %>
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
- name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ matrix.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ matrix.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs
- name: Run Package Download Tests
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm combine-coverage ${{ matrix.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ matrix.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \
# --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ matrix.distro-slug }},pkg \
--name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \
# --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ matrix.distro-slug }},pkg \
--name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success'
with:
check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
macos:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["macos"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- endfor %>
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Install System Dependencies
run: |
brew install tree
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
update-environment: true
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ matrix.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- ${{ matrix.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "1"
PRINT_TEST_PLAN_ONLY: "1"
PRINT_SYSTEM_INFO: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs
- name: Run Package Download Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ matrix.distro-slug }}
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/macos/codecov
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \
# --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ matrix.distro-slug }},pkg \
--name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \
# --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ matrix.distro-slug }},pkg \
--name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always()
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && job.status != 'cancelled'
with:
check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
windows:
name: Windows
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
include:
<%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["windows"] %>
- distro-slug: <{ slug }>
arch: <{ arch }>
pkg-type: <{ pkg_type }>
<%- endfor %>
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
- name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ matrix.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ matrix.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs
- name: Run Package Download Tests
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm combine-coverage ${{ matrix.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ matrix.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \
# --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ matrix.distro-slug }},pkg \
--name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \
# --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ matrix.distro-slug }},pkg \
--name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success'
with:
check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }} ${{ matrix.pkg-type }} )
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -1,138 +0,0 @@
<%- set linux_pkg_tests = (
("almalinux-8", "Alma Linux 8", "x86_64", "package"),
("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64", "package"),
("almalinux-9", "Alma Linux 9", "x86_64", "package"),
("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64", "package"),
("amazonlinux-2", "Amazon Linux 2", "x86_64", "package"),
("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "package"),
("centos-7", "CentOS 7", "x86_64", "package"),
("centos-7-arm64", "CentOS 7 Arm64", "aarch64", "package"),
("centosstream-8", "CentOS Stream 8", "x86_64", "package"),
("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64", "package"),
("centosstream-9", "CentOS Stream 9", "x86_64", "package"),
("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64", "package"),
("debian-10", "Debian 10", "x86_64", "package"),
("debian-11", "Debian 11", "x86_64", "package"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64", "package"),
("fedora-37", "Fedora 37", "x86_64", "package"),
("fedora-37-arm64", "Fedora 37 Arm64", "aarch64", "package"),
("fedora-38", "Fedora 38", "x86_64", "package"),
("fedora-38-arm64", "Fedora 38 Arm64", "aarch64", "package"),
("photonos-3", "Photon OS 3", "x86_64", "package"),
("photonos-4", "Photon OS 4", "x86_64", "package"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "package"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "package"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "package"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "package"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "package"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "onedir"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "onedir")
) %>
<%- for slug, display_name, arch, pkg_type in linux_pkg_tests %>
<%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
name: Test <{ display_name }> <{ pkg_type }> Downloads
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %>
needs:
- prepare-workflow
- publish-repositories
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: <{ slug }>
platform: linux
arch: <{ arch }>
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
nox-version: <{ nox_version }>
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
pkg-type: <{ pkg_type.lower() }>
secrets: inherit
<%- endfor %>
<%- for slug, display_name, arch, pkg_type in (
("macos-12", "macOS 12", "x86_64", "Package"),
("macos-12", "macOS 12", "x86_64", "Onedir"),
) %>
<%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
name: Test <{ display_name }> <{ pkg_type }> Downloads
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %>
needs:
- prepare-workflow
- publish-repositories
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action-macos.yml
with:
distro-slug: <{ slug }>
platform: darwin
arch: <{ arch }>
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
nox-version: <{ nox_version }>
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
pkg-type: <{ pkg_type.lower() }>
secrets: inherit
<%- endfor %>
<%- for slug, display_name, arch in (
("windows-2022", "Windows 2022", "amd64"),
) %>
<%- for pkg_type in ("NSIS", "MSI", "Onedir") %>
<%- set job_name = "{}-{}-{}-download-tests".format(slug.replace(".", ""), pkg_type, arch.lower()) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %>
name: Test <{ display_name }> <{ arch }> <{ pkg_type }> Package Downloads
needs:
- prepare-workflow
- publish-repositories
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: <{ slug }>
platform: windows
arch: <{ arch }>
pkg-type: <{ pkg_type.lower() }>
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
nox-version: <{ nox_version }>
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
<%- endfor %>
<%- endfor %>

View file

@ -0,0 +1,36 @@
<%- set job_name = "pkg-download-tests" %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
<%- do conclusion_needs.append(job_name) %>
name: Package Downloads
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %>
needs:
- prepare-workflow
- publish-repositories
<%- for slug in test_salt_pkg_downloads_needs_slugs %>
- <{ slug }>
<%- endfor %>
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- else %>
- build-salt-onedir-linux
- build-salt-onedir-macos
- build-salt-onedir-windows
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action.yml
with:
nox-session: ci-test-onedir
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

View file

@ -1,57 +1,24 @@
<%- set linux_pkg_tests = (
("almalinux-8", "Alma Linux 8", "x86_64", "rpm"),
("almalinux-9", "Alma Linux 9", "x86_64", "rpm"),
("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"),
("centos-7", "CentOS 7", "x86_64", "rpm"),
("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"),
("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"),
("debian-10", "Debian 10", "x86_64", "deb"),
("debian-11", "Debian 11", "x86_64", "deb"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"),
("fedora-37", "Fedora 37", "x86_64", "rpm"),
("fedora-38", "Fedora 38", "x86_64", "rpm"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb")
) %>
<%- set linux_pkg_tests = (
("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"),
("centos-7", "CentOS 7", "x86_64", "rpm"),
("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"),
("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"),
("debian-10", "Debian 10", "x86_64", "deb"),
("debian-11", "Debian 11", "x86_64", "deb"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"),
("photonos-3", "Photon OS 3", "x86_64", "rpm"),
("photonos-4", "Photon OS 4", "x86_64", "rpm"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb")
) %>
<%- for slug, display_name, arch, pkg_type in linux_pkg_tests %>
<%- for slug, display_name, arch, pkg_type in test_salt_pkg_listing["linux"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> Package Tests
name: <{ display_name }> Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-<{ pkg_type }>-pkgs
- build-<{ pkg_type }>-pkgs-onedir
- <{ slug.replace(".", "") }>-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: <{ slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
skip-junit-reports: <{ skip_junit_reports_check }>
@ -61,24 +28,27 @@
<%- for slug, display_name, arch in (("macos-12", "macOS 12", "x86_64"),) %>
<%- for slug, display_name, arch in test_salt_pkg_listing["macos"] %>
<%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> Package Tests
name: <{ display_name }> Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
- build-macos-pkgs
- build-macos-pkgs-onedir
- <{ slug.replace(".", "") }>-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: <{ slug }>
nox-session: ci-test-onedir
platform: darwin
arch: <{ arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: macos
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
skip-junit-reports: <{ skip_junit_reports_check }>
@ -87,27 +57,28 @@
<%- endfor %>
<%- for slug, display_name, arch in (("windows-2016", "Windows 2016", "amd64"),
("windows-2019", "Windows 2019", "amd64"),
("windows-2022", "Windows 2022", "amd64")) %>
<%- for slug, display_name, arch in test_salt_pkg_listing["windows"] %>
<%- for pkg_type in ("NSIS", "MSI") %>
<%- set job_name = "{}-{}-pkg-tests".format(slug.replace(".", ""), pkg_type.lower()) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ display_name }> <{ pkg_type }> Package Tests
name: <{ display_name }> <{ pkg_type }> Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-windows-pkgs
- build-windows-pkgs-onedir
- <{ slug.replace(".", "") }>-ci-deps
uses: ./.github/workflows/test-packages-action.yml
with:
distro-slug: <{ slug }>
nox-session: ci-test-onedir
platform: windows
arch: <{ arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
skip-junit-reports: <{ skip_junit_reports_check }>

View file

@ -1,15 +1,13 @@
<%- for slug, display_name, arch in (("windows-2016", "Windows 2016", "amd64"),
("windows-2019", "Windows 2019", "amd64"),
("windows-2022", "Windows 2022", "amd64")) %>
<%- for slug, display_name, arch in test_salt_listing["windows"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }>
name: <{ display_name }> Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
- <{ slug.replace(".", "") }>-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: <{ slug }>
@ -17,6 +15,7 @@
platform: windows
arch: amd64
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
@ -25,15 +24,16 @@
<%- endfor %>
<%- for slug, display_name, arch in (("macos-12", "macOS 12", "x86_64"),) %>
<%- for slug, display_name, arch in test_salt_listing["macos"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }>
name: <{ display_name }> Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
- <{ slug.replace(".", "") }>-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: <{ slug }>
@ -41,6 +41,7 @@
platform: darwin
arch: x86_64
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
@ -49,34 +50,16 @@
<%- endfor %>
<%- for slug, display_name, arch in (("almalinux-8", "Alma Linux 8", "x86_64"),
("almalinux-9", "Alma Linux 9", "x86_64"),
("amazonlinux-2", "Amazon Linux 2", "x86_64"),
("archlinux-lts", "Arch Linux LTS", "x86_64"),
("centos-7", "CentOS 7", "x86_64"),
("centosstream-8", "CentOS Stream 8", "x86_64"),
("centosstream-9", "CentOS Stream 9", "x86_64"),
("debian-10", "Debian 10", "x86_64"),
("debian-11", "Debian 11", "x86_64"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64"),
("fedora-37", "Fedora 37", "x86_64"),
("fedora-38", "Fedora 38", "x86_64"),
("opensuse-15", "Opensuse 15", "x86_64"),
("photonos-3", "Photon OS 3", "x86_64"),
("photonos-4", "Photon OS 4", "x86_64"),
("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64")) %>
<%- for slug, display_name, arch in test_salt_listing["linux"] %>
<{ slug.replace(".", "") }>:
<%- do test_salt_needs.append(slug.replace(".", "")) %>
name: <{ display_name }>
name: <{ display_name }> Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
- <{ slug.replace(".", "") }>-ci-deps
uses: ./.github/workflows/test-action.yml
with:
distro-slug: <{ slug }>
@ -84,6 +67,7 @@
platform: linux
arch: <{ arch }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>

View file

@ -20,7 +20,7 @@ on:
required: false
type: string
description: The python version to run tests with
default: "3.9"
default: "3.11"
salt-version:
type: string
required: true
@ -57,7 +57,6 @@ on:
description: Skip Publishing JUnit Reports
default: false
env:
COLUMNS: 190
PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/"
@ -66,14 +65,19 @@ env:
jobs:
generate-matrix:
name: Generate Test Matrix
name: Test Matrix
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
outputs:
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
transport-matrix-include: ${{ steps.generate-transport-matrix.outputs.matrix }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -81,123 +85,32 @@ jobs:
- name: Generate Test Matrix
id: generate-matrix
run: |
TEST_MATRIX=$(tools ci matrix ${{ inputs.distro-slug }})
echo "$TEST_MATRIX"
echo "matrix=$TEST_MATRIX" >> "$GITHUB_OUTPUT"
- name: Generate Transport Matrix
id: generate-transport-matrix
run: |
TRANSPORT_MATRIX=$(tools ci transport-matrix ${{ inputs.distro-slug }})
echo "$TRANSPORT_MATRIX"
echo "matrix=$TRANSPORT_MATRIX" >> "$GITHUB_OUTPUT"
dependencies:
name: Setup Test Dependencies
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 90
needs:
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'noxfile.py') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
- name: Install System Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
brew install openssl@3
- name: Install Nox
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Define Nox Session
id: define-nox-session
run: |
if [ "${{ matrix.transport }}" != "tcp" ]; then
echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV"
echo "nox-session=${{ inputs.nox-session}}" >> "$GITHUB_OUTPUT"
else
echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV"
echo "nox-session=${{ inputs.nox-session}}-tcp" >> "$GITHUB_OUTPUT"
fi
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
run: |
export PYCURL_SSL_LIBRARY=openssl
export LDFLAGS="-L/usr/local/opt/openssl@3/lib"
export CPPFLAGS="-I/usr/local/opt/openssl@3/include"
export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig"
nox --install-only -e ${{ env.NOX_SESSION }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox -e "pre-archive-cleanup(pkg=False)"
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ steps.define-nox-session.outputs.nox-session }}
path: nox.${{ inputs.distro-slug }}.tar.*
tools ci matrix ${{ inputs.distro-slug }}
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 360 # 6 Hours
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 5 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 120 || 300 }}
needs:
- generate-matrix
- dependencies
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
env:
SALT_TRANSPORT: ${{ matrix.transport }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Salt Version
run: |
@ -224,7 +137,9 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'noxfile.py') }}
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
@ -241,14 +156,6 @@ jobs:
run: |
nox -e decompress-dependencies -- ${{ inputs.distro-slug }}
- name: Define Nox Session
run: |
if [ "${{ matrix.transport }}" != "tcp" ]; then
echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV"
else
echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV"
fi
- name: Download testrun-changed-files.txt
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
uses: actions/download-artifact@v3
@ -265,7 +172,7 @@ jobs:
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "1"
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin"
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin"
- name: Run Fast/Changed Tests
id: run-fast-changed-tests
@ -281,7 +188,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code \
--from-filenames=testrun-changed-files.txt
@ -299,7 +206,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests \
--from-filenames=testrun-changed-files.txt
@ -317,7 +224,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests \
--from-filenames=testrun-changed-files.txt
@ -335,7 +242,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code
- name: Run Slow Tests
@ -352,7 +259,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests
- name: Run Core Tests
@ -369,7 +276,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests
- name: Run Flaky Tests
@ -386,7 +293,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
- name: Run Full Tests
@ -403,7 +310,7 @@ jobs:
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \
sudo -E nox -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
--slow-tests --core-tests -k "mac or darwin"
- name: Fix file ownership
@ -411,95 +318,75 @@ jobs:
sudo chown -R "$(id -un)" .
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false
run: |
nox -e combine-coverage
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}
echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
fi
- name: Upload Code Coverage Test Run Artifacts
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
path: |
artifacts/coverage/
- name: Upload All Code Coverage Test Run Artifacts
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: all-testrun-coverage-artifacts
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: |
artifacts/coverage/
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
path: |
artifacts/xml-unittests-output/
- name: Upload Test Run Log Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
path: |
artifacts/logs
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && inputs.skip-junit-reports == false
with:
check_name: Test Results(${{ inputs.distro-slug }}, transport=${{ matrix.transport }}, tests-chunk=${{ matrix.tests-chunk }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }})
if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
name: Test Reports
if: always() && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
needs:
- generate-matrix
- test
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Define Nox Session
run: |
if [ "${{ matrix.transport }}" != "tcp" ]; then
echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV"
else
echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV"
fi
uses: actions/checkout@v4
- name: Download Code Coverage Test Run Artifacts
uses: actions/download-artifact@v3
if: ${{ inputs.skip-code-coverage == false }}
id: download-coverage-artifacts
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: artifacts/coverage/
- name: Download JUnit XML Test Run Artifacts
uses: actions/download-artifact@v3
id: download-junit-artifacts
with:
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
path: artifacts/xml-unittests-output/
- name: Show Downloaded Test Run Artifacts
run: |
tree -a artifacts
@ -513,18 +400,88 @@ jobs:
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Combine Code Coverage
if: ${{ inputs.skip-code-coverage == false }}
continue-on-error: true
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
nox -e create-xml-coverage-reports
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: code-coverage
path: artifacts/coverage
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/macos/codecov
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }} \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }} \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Report Salt Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
@ -532,23 +489,21 @@ jobs:
run: |
nox --force-color -e report-coverage -- salt
- name: Report Tests Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage -- tests
- name: Report Combined Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && inputs.skip-junit-reports == false && steps.download-junit-artifacts.outcome == 'success'
- name: Rename Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
check_name: Test Results(${{ inputs.distro-slug }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
name: all-testrun-coverage-artifacts
path: artifacts/coverage

View file

@ -36,6 +36,11 @@ on:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
package-name:
required: false
type: string
@ -52,7 +57,6 @@ on:
description: Skip Publishing JUnit Reports
default: false
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
@ -63,17 +67,22 @@ env:
jobs:
generate-matrix:
name: Generate Test Matrix
name: Test Matrix
runs-on:
- self-hosted
- linux
- x86_64
outputs:
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
transport-matrix-include: ${{ steps.generate-transport-matrix.outputs.matrix }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -81,129 +90,7 @@ jobs:
- name: Generate Test Matrix
id: generate-matrix
run: |
TEST_MATRIX=$(tools ci matrix ${{ inputs.distro-slug }})
echo "$TEST_MATRIX"
echo "matrix=$TEST_MATRIX" >> "$GITHUB_OUTPUT"
- name: Generate Transport Matrix
id: generate-transport-matrix
run: |
TRANSPORT_MATRIX=$(tools ci transport-matrix ${{ inputs.distro-slug }})
echo "$TRANSPORT_MATRIX"
echo "matrix=$TRANSPORT_MATRIX" >> "$GITHUB_OUTPUT"
dependencies:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: PyPi Proxy
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: ./.github/actions/setup-python-tools-scripts
- name: Define Nox Session
id: define-nox-session
run: |
if [ "${{ matrix.transport }}" != "tcp" ]; then
echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV"
echo "nox-session=${{ inputs.nox-session}}" >> "$GITHUB_OUTPUT"
else
echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV"
echo "nox-session=${{ inputs.nox-session}}-tcp" >> "$GITHUB_OUTPUT"
fi
- name: Get Salt Project GitHub Actions Bot Environment
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm install-dependencies --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ steps.define-nox-session.outputs.nox-session }}
path: nox.${{ inputs.distro-slug }}.tar.*
tools ci matrix ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }}
test:
name: Test
@ -211,18 +98,28 @@ jobs:
- self-hosted
- linux
- bastion
timeout-minutes: 300 # 5 Hours - More than this and something is wrong
# Full test runs. Each chunk should never take more than 2 hours.
    # Partial test runs (no chunk parallelization), 5 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 120 || 300 }}
needs:
- dependencies
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
env:
SALT_TRANSPORT: ${{ matrix.transport }}
TEST_GROUP: ${{ matrix.test-group || 1 }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Salt Version
run: |
@ -245,7 +142,9 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
@ -256,14 +155,6 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Define Nox Session
run: |
if [ "${{ matrix.transport }}" != "tcp" ]; then
echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV"
else
echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV"
fi
- name: Download testrun-changed-files.txt
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
uses: actions/download-artifact@v3
@ -298,7 +189,7 @@ jobs:
- name: Show System Info & Test Plan
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }}
- name: Run Fast/Changed Tests
@ -306,7 +197,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --suppress-no-test-exit-code \
--from-filenames=testrun-changed-files.txt
@ -315,7 +206,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests --suppress-no-test-exit-code \
--from-filenames=testrun-changed-files.txt
@ -324,7 +215,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests --suppress-no-test-exit-code \
--from-filenames=testrun-changed-files.txt
@ -333,7 +224,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
- name: Run Slow Tests
@ -341,7 +232,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
- name: Run Core Tests
@ -349,7 +240,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
- name: Run Flaky Tests
@ -357,7 +248,7 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
- name: Run Full Tests
@ -365,17 +256,18 @@ jobs:
if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
-E TEST_GROUP ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
--test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
@ -383,8 +275,7 @@ jobs:
rm -rf artifacts/salt*
tree -a artifacts
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}
echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
fi
- name: Destroy VM
@ -396,77 +287,58 @@ jobs:
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
path: |
artifacts/coverage/
- name: Upload All Code Coverage Test Run Artifacts
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: all-testrun-coverage-artifacts
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: |
artifacts/coverage/
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
path: |
artifacts/xml-unittests-output/
- name: Upload Test Run Log Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}
path: |
artifacts/logs
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
        # always run even if the previous steps fail
if: always() && inputs.skip-junit-reports == false && job.status != 'cancelled'
with:
check_name: Test Results(${{ inputs.distro-slug }}, transport=${{ matrix.transport }}, tests-chunk=${{ matrix.tests-chunk }}, group=${{ matrix.test-group || '1' }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }})
if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
name: Test Reports
if: always() && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
runs-on:
- self-hosted
- linux
- x86_64
needs:
- generate-matrix
- test
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Define Nox Session
run: |
if [ "${{ matrix.transport }}" != "tcp" ]; then
echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV"
else
echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV"
fi
uses: actions/checkout@v4
- name: Download Code Coverage Test Run Artifacts
uses: actions/download-artifact@v3
if: ${{ inputs.skip-code-coverage == false }}
id: download-coverage-artifacts
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: artifacts/coverage/
- name: Download JUnit XML Test Run Artifacts
uses: actions/download-artifact@v3
id: download-junit-artifacts
with:
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
path: artifacts/xml-unittests-output/
- name: Show Downloaded Test Run Artifacts
run: |
tree -a artifacts
@ -475,18 +347,88 @@ jobs:
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Combine Code Coverage
if: ${{ inputs.skip-code-coverage == false }}
continue-on-error: true
- name: Create XML Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
nox -e create-xml-coverage-reports
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: code-coverage
path: artifacts/coverage
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ inputs.distro-slug }} \
# --name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ inputs.distro-slug }} \
--name salt.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ inputs.distro-slug }} \
# --name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ inputs.distro-slug }} \
--name tests.${{ inputs.distro-slug }}.${{ inputs.nox-session }} --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Report Salt Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
@ -494,23 +436,21 @@ jobs:
run: |
nox --force-color -e report-coverage -- salt
- name: Report Tests Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage -- tests
- name: Report Combined Code Coverage
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
nox --force-color -e report-coverage
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
      # always run even if the previous steps fail
if: always() && inputs.skip-junit-reports == false && steps.download-junit-artifacts.outcome == 'success'
- name: Rename Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
continue-on-error: true
run: |
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
- name: Upload Code Coverage DB
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
uses: actions/upload-artifact@v3
with:
check_name: Test Results(${{ inputs.distro-slug }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
name: all-testrun-coverage-artifacts
path: artifacts/coverage

View file

@ -1,328 +0,0 @@
name: Test Download Packages
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
salt-version:
type: string
required: true
description: The Salt version of the packages to install and test
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
environment:
required: true
type: string
description: The environment to run tests against
latest-release:
required: true
type: string
description: The latest salt release
pkg-type:
required: true
type: string
description: The type of artifact to download
nox-version:
required: true
type: string
description: The nox version to install
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
nox-session:
required: false
type: string
description: The nox session to run
default: test-pkgs-onedir
env:
COLUMNS: 160
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
dependencies:
name: Setup Test Dependencies
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 90
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
name: Test
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- dependencies
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ inputs.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }}
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs
- name: Run Package Download Tests
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ inputs.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
runs-on:
- self-hosted
- linux
- x86_64
environment: ${{ inputs.environment }}
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- test
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: artifacts
- name: Show Test Run Artifacts
if: always() && steps.download-test-run-artifacts.outcome == 'success'
run: |
tree -a artifacts
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
        # always run even if the previous steps fail
if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
with:
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -1,304 +0,0 @@
# Reusable workflow: run the Salt package download tests for a single
# distro-slug/arch combination directly on a hosted runner (the dependency
# steps use `brew`, so this variant targets macOS-style runners).
name: Test Download Packages

on:
  workflow_call:
    inputs:
      distro-slug:
        required: true
        type: string
        description: The OS slug to run tests against
      platform:
        required: true
        type: string
        description: The platform being tested
      arch:
        required: true
        type: string
        description: The platform arch being tested
      salt-version:
        type: string
        required: true
        description: The Salt version of the packages to install and test
      cache-prefix:
        required: true
        type: string
        description: Seed used to invalidate caches
      environment:
        required: true
        type: string
        description: The environment to run tests against
      latest-release:
        required: true
        type: string
        description: The latest salt release
      pkg-type:
        required: true
        type: string
        description: The type of artifact to download
      nox-version:
        required: true
        type: string
        description: The nox version to install
      python-version:
        required: false
        type: string
        description: The python version to run tests with
        default: "3.10"
      package-name:
        required: false
        type: string
        description: The onedir package name to use
        default: salt
      skip-code-coverage:
        required: false
        type: boolean
        description: Skip code coverage
        default: false
      nox-session:
        required: false
        type: string
        description: The nox session to run
        default: test-pkgs-onedir
# Environment shared by every job in this workflow.
env:
  COLUMNS: 160
  AWS_MAX_ATTEMPTS: "10"
  AWS_RETRY_MODE: "adaptive"
  # Prefer the Salt pypi proxy; fall back to the public index.
  PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
  PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
  # Builds (or restores from cache) the compressed .nox virtualenv tarball
  # that the `test` job below decompresses and reuses.
  dependencies:
    name: Setup Test Dependencies
    runs-on: ${{ inputs.distro-slug }}
    timeout-minutes: 90
    steps:
      - name: Checkout Source Code
        uses: actions/checkout@v3
      - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
        id: nox-dependencies-cache
        uses: actions/cache@v3
        with:
          path: nox.${{ inputs.distro-slug }}.tar.*
          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
          path: artifacts/
      # Every step below is skipped on a cache hit — the tarball already
      # contains the fully-installed dependencies.
      - name: Decompress Onedir Tarball
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        shell: bash
        run: |
          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
          cd artifacts
          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
      - name: Set up Python ${{ inputs.python-version }}
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python-version }}"
          update-environment: true
      - name: Install System Dependencies
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          brew install openssl@3
      - name: Install Nox
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          python3 -m pip install 'nox==${{ inputs.nox-version }}'
      - name: Install Dependencies
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        env:
          PRINT_TEST_SELECTION: "0"
          PRINT_SYSTEM_INFO: "0"
        run: |
          # Point pycurl and friends at the Homebrew openssl@3 install.
          export PYCURL_SSL_LIBRARY=openssl
          export LDFLAGS="-L/usr/local/opt/openssl@3/lib"
          export CPPFLAGS="-I/usr/local/opt/openssl@3/include"
          export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig"
          nox --force-color --install-only -e ${{ inputs.nox-session }}
      - name: Cleanup .nox Directory
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          nox --force-color -e "pre-archive-cleanup(pkg=False)"
      - name: Compress .nox Directory
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
      - name: Upload Nox Requirements Tarball
        uses: actions/upload-artifact@v3
        with:
          name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
          path: nox.${{ inputs.distro-slug }}.tar.*
  # Runs the package download test suite directly on the runner, then
  # combines coverage and uploads the test-run artifacts.
  test:
    name: Test
    runs-on: ${{ inputs.distro-slug }}
    environment: ${{ inputs.environment }}
    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
    needs:
      - dependencies
    steps:
      - name: Checkout Source Code
        uses: actions/checkout@v3
      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
          path: artifacts/
      - name: Install System Dependencies
        run: |
          brew install tree
      - name: Decompress Onedir Tarball
        shell: bash
        run: |
          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
          cd artifacts
          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
      - name: Set up Python ${{ inputs.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python-version }}"
          update-environment: true
      - name: Install Nox
        run: |
          python3 -m pip install 'nox==${{ inputs.nox-version }}'
      - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
        uses: actions/cache@v3
        with:
          path: nox.${{ inputs.distro-slug }}.tar.*
          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
          # If we get a cache miss here it means the dependencies step failed to save the cache
          fail-on-cache-miss: true
      - name: Decompress .nox Directory
        run: |
          nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }}
      # Dry run: print the system info and the tests that would be selected.
      - name: Show System Info & Test Plan
        env:
          SALT_RELEASE: "${{ inputs.salt-version }}"
          SKIP_REQUIREMENTS_INSTALL: "1"
          PRINT_TEST_SELECTION: "1"
          PRINT_TEST_PLAN_ONLY: "1"
          PRINT_SYSTEM_INFO: "1"
          GITHUB_ACTIONS_PIPELINE: "1"
          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
          LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
          DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }}
        run: |
          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
      - name: Run Package Download Tests
        env:
          SKIP_REQUIREMENTS_INSTALL: "1"
          PRINT_TEST_SELECTION: "0"
          PRINT_TEST_PLAN_ONLY: "0"
          PRINT_SYSTEM_INFO: "0"
          RERUN_FAILURES: "1"
          GITHUB_ACTIONS_PIPELINE: "1"
          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
          COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
          SALT_RELEASE: "${{ inputs.salt-version }}"
          SALT_REPO_ARCH: ${{ inputs.arch }}
          LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
          SALT_REPO_TYPE: ${{ inputs.environment }}
          SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
          SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
          DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }}
        run: |
          sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
      # The test run above executes under sudo; reclaim the workspace.
      - name: Fix file ownership
        run: |
          sudo chown -R "$(id -un)" .
      - name: Combine Coverage Reports
        if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
        run: |
          nox --force-color -e combine-coverage
      - name: Prepare Test Run Artifacts
        id: download-artifacts-from-vm
        if: always() && job.status != 'cancelled'
        run: |
          # Delete the salt onedir, we won't need it anymore and it will prevent
          # from it showing in the tree command below
          rm -rf artifacts/salt*
          tree -a artifacts
      - name: Upload Test Run Artifacts
        if: always() && job.status != 'cancelled'
        uses: actions/upload-artifact@v3
        with:
          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
          path: |
            artifacts
            !artifacts/salt/*
            !artifacts/salt-*.tar.*
  # Aggregates the artifacts uploaded by the `test` job and publishes a
  # JUnit check run with the overall results.
  report:
    name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
    # Private repos use the self-hosted pool; public runs fall back to a
    # GitHub-hosted runner.
    runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
    environment: ${{ inputs.environment }}
    # Run whenever the test job actually ran (pass or fail), but not when it
    # was cancelled or skipped.
    if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
    needs:
      - test
    steps:
      - name: Checkout Source Code
        uses: actions/checkout@v3
      - name: Download Test Run Artifacts
        id: download-test-run-artifacts
        uses: actions/download-artifact@v3
        with:
          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
          path: artifacts
      - name: Show Test Run Artifacts
        if: always() && steps.download-test-run-artifacts.outcome == 'success'
        run: |
          tree -a artifacts
      - name: Set up Python ${{ inputs.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python-version }}"
      - name: Install Nox
        run: |
          python3 -m pip install 'nox==${{ inputs.nox-version }}'
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v3
        # Always run even if the previous steps fail, but only on push events
        # and when the artifacts were successfully downloaded.
        if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
        with:
          check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }})
          report_paths: 'artifacts/xml-unittests-output/*.xml'
          annotate_only: true

View file

@ -1,333 +0,0 @@
# Reusable workflow: run the Salt package download tests for a single
# distro-slug/arch/pkg-type combination on an ephemeral VM spun up from a
# self-hosted bastion runner.
name: Test Download Packages

on:
  workflow_call:
    inputs:
      distro-slug:
        required: true
        type: string
        description: The OS slug to run tests against
      platform:
        required: true
        type: string
        description: The platform being tested
      arch:
        required: true
        type: string
        description: The platform arch being tested
      pkg-type:
        required: true
        type: string
        # Fixed: this description was a copy-paste of the `arch` input's
        # description ("The platform arch being tested").
        description: The type of package being tested
      salt-version:
        type: string
        required: true
        description: The Salt version of the packages to install and test
      cache-prefix:
        required: true
        type: string
        description: Seed used to invalidate caches
      environment:
        required: true
        type: string
        description: The environment to run tests against
      latest-release:
        required: true
        type: string
        description: The latest salt release
      nox-version:
        required: true
        type: string
        description: The nox version to install
      package-name:
        required: false
        type: string
        description: The onedir package name to use
        default: salt
      nox-session:
        required: false
        type: string
        description: The nox session to run
        default: test-pkgs-onedir
      skip-code-coverage:
        required: false
        type: boolean
        description: Skip code coverage
        default: false
      skip-junit-reports:
        required: false
        type: boolean
        description: Skip Publishing JUnit Reports
        default: false
# Environment shared by every job in this workflow.
env:
  COLUMNS: 160
  AWS_MAX_ATTEMPTS: "10"
  AWS_RETRY_MODE: "adaptive"
  # Prefer the Salt pypi proxy; fall back to the public index.
  PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
  PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
  # Builds (or restores from cache) the compressed .nox virtualenv tarball on
  # an ephemeral VM; the `test` job below restores it from the cache.
  dependencies:
    name: Setup Test Dependencies
    runs-on:
      - self-hosted
      - linux
      - bastion
    timeout-minutes: 90
    steps:
      - name: Checkout Source Code
        uses: actions/checkout@v3
      - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
        id: nox-dependencies-cache
        uses: actions/cache@v3
        with:
          path: nox.${{ inputs.distro-slug }}.tar.*
          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
          path: artifacts/
      # Every step below is skipped on a cache hit — the tarball already
      # contains the fully-installed dependencies.
      - name: Decompress Onedir Tarball
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        shell: bash
        run: |
          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
          cd artifacts
          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
      - name: Setup Python Tools Scripts
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        uses: ./.github/actions/setup-python-tools-scripts
      - name: Get Salt Project GitHub Actions Bot Environment
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          # Read the spb:environment instance tag via IMDSv2.
          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
      - name: Start VM
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        id: spin-up-vm
        run: |
          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
      - name: List Free Space
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
      - name: Upload Checkout To VM
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm rsync ${{ inputs.distro-slug }}
      - name: Install Dependencies
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
      - name: Cleanup .nox Directory
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
      - name: Compress .nox Directory
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
      - name: Download Compressed .nox Directory
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
      # Tear the VM down even when earlier steps failed, as long as one was
      # actually created (i.e. we did not hit the cache).
      - name: Destroy VM
        if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
      - name: Upload Nox Requirements Tarball
        uses: actions/upload-artifact@v3
        with:
          name: nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
          path: nox.${{ inputs.distro-slug }}.tar.*
  # Runs the package download test suite inside an ephemeral VM, then pulls
  # coverage and test artifacts back before destroying the VM.
  test:
    name: Test
    runs-on:
      - self-hosted
      - linux
      - bastion
    environment: ${{ inputs.environment }}
    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
    needs:
      - dependencies
    steps:
      - name: Checkout Source Code
        uses: actions/checkout@v3
      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
          path: artifacts/
      - name: Decompress Onedir Tarball
        shell: bash
        run: |
          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
          cd artifacts
          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
      - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
        uses: actions/cache@v3
        with:
          path: nox.${{ inputs.distro-slug }}.tar.*
          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
          # If we get a cache miss here it means the dependencies step failed to save the cache
          fail-on-cache-miss: true
      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts
      - name: Get Salt Project GitHub Actions Bot Environment
        run: |
          # Read the spb:environment instance tag via IMDSv2.
          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
      - name: Start VM
        id: spin-up-vm
        run: |
          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
      - name: List Free Space
        run: |
          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
      - name: Upload Checkout To VM
        run: |
          tools --timestamps vm rsync ${{ inputs.distro-slug }}
      - name: Decompress .nox Directory
        run: |
          tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
      # Dry run: print the system info and the tests that would be selected.
      - name: Show System Info & Test Plan
        env:
          SALT_RELEASE: "${{ inputs.salt-version }}"
          SALT_REPO_ARCH: ${{ inputs.arch }}
          LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
          SALT_REPO_TYPE: ${{ inputs.environment }}
          SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
          SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
          DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }}
        run: |
          tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
            -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
            -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
            --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs
      - name: Run Package Download Tests
        env:
          SALT_RELEASE: "${{ inputs.salt-version }}"
          SALT_REPO_ARCH: ${{ inputs.arch }}
          LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
          SALT_REPO_TYPE: ${{ inputs.environment }}
          SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
          SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
          DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }}
        run: |
          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
            -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
            -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
            --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
      - name: Combine Coverage Reports
        if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
        run: |
          tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
      - name: Download Test Run Artifacts
        id: download-artifacts-from-vm
        if: always() && steps.spin-up-vm.outcome == 'success'
        run: |
          tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
          # Delete the salt onedir, we won't need it anymore and it will prevent
          # from it showing in the tree command below
          rm -rf artifacts/salt*
          tree -a artifacts
      - name: Destroy VM
        if: always()
        run: |
          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
      - name: Fix file ownership
        run: |
          sudo chown -R "$(id -un)" .
      - name: Upload Test Run Artifacts
        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
        uses: actions/upload-artifact@v3
        with:
          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
          path: |
            artifacts
            !artifacts/salt/*
            !artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
runs-on:
- self-hosted
- linux
- x86_64
environment: ${{ inputs.environment }}
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- test
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: artifacts
- name: Show Test Run Artifacts
if: always() && steps.download-test-run-artifacts.outcome == 'success'
run: |
tree -a artifacts
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fails
if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
with:
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }} ${{ inputs.pkg-type }} )
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -0,0 +1,827 @@
# Reusable workflow: run the Salt package download tests across a full matrix
# of distro/arch/pkg-type combinations (Linux on ephemeral VMs; other
# platforms in jobs below).
name: Test Download Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version of the packages to install and test
      cache-prefix:
        required: true
        type: string
        description: Seed used to invalidate caches
      environment:
        required: true
        type: string
        description: The environment to run tests against
      latest-release:
        required: true
        type: string
        description: The latest salt release
      nox-version:
        required: true
        type: string
        description: The nox version to install
      python-version:
        required: false
        type: string
        description: The python version to run tests with
        default: "3.10"
      package-name:
        required: false
        type: string
        description: The onedir package name to use
        default: salt
      skip-code-coverage:
        required: false
        type: boolean
        description: Skip code coverage
        default: false
      nox-session:
        required: false
        type: string
        description: The nox session to run
        default: ci-test-onedir
# Environment shared by every job in this workflow.
env:
  COLUMNS: 190
  AWS_MAX_ATTEMPTS: "10"
  AWS_RETRY_MODE: "adaptive"
  # Prefer the Salt pypi proxy; fall back to the public index.
  PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
  PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
  # One matrix leg per Linux distro/arch/pkg-type combination; each leg runs
  # the download tests inside an ephemeral VM created from a bastion runner.
  linux:
    name: Linux
    runs-on:
      - self-hosted
      - linux
      - bastion
    environment: ${{ inputs.environment }}
    timeout-minutes: 120  # 2 Hours - More than this and something is wrong
    strategy:
      fail-fast: false
      matrix:
        include:
          - distro-slug: almalinux-8
            arch: x86_64
            pkg-type: package
          - distro-slug: almalinux-8-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: almalinux-9
            arch: x86_64
            pkg-type: package
          - distro-slug: almalinux-9-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: amazonlinux-2
            arch: x86_64
            pkg-type: package
          - distro-slug: amazonlinux-2-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: centos-7
            arch: x86_64
            pkg-type: package
          - distro-slug: centos-7-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: centosstream-8
            arch: x86_64
            pkg-type: package
          - distro-slug: centosstream-8-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: centosstream-9
            arch: x86_64
            pkg-type: package
          - distro-slug: centosstream-9-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: debian-10
            arch: x86_64
            pkg-type: package
          - distro-slug: debian-11
            arch: x86_64
            pkg-type: package
          - distro-slug: debian-11-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: debian-12
            arch: x86_64
            pkg-type: package
          - distro-slug: debian-12-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: fedora-37
            arch: x86_64
            pkg-type: package
          - distro-slug: fedora-37-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: fedora-38
            arch: x86_64
            pkg-type: package
          - distro-slug: fedora-38-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: photonos-3
            arch: x86_64
            pkg-type: package
          - distro-slug: photonos-4
            arch: x86_64
            pkg-type: package
          - distro-slug: photonos-4-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: ubuntu-20.04
            arch: x86_64
            pkg-type: package
          - distro-slug: ubuntu-20.04-arm64
            arch: aarch64
            pkg-type: package
          - distro-slug: ubuntu-22.04
            arch: x86_64
            pkg-type: package
          - distro-slug: ubuntu-22.04-arm64
            arch: aarch64
            pkg-type: package
          # The onedir legs test the standalone tarball download rather than
          # the OS package.
          - distro-slug: ubuntu-22.04
            arch: x86_64
            pkg-type: onedir
          - distro-slug: ubuntu-22.04-arm64
            arch: aarch64
            pkg-type: onedir
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
- name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ matrix.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ matrix.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs
- name: Run Package Download Tests
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm combine-coverage ${{ matrix.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ matrix.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \
# --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ matrix.distro-slug }},pkg \
--name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \
# --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ matrix.distro-slug }},pkg \
--name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fail
if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success'
with:
check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
macos:
name: MacOS
runs-on: ${{ matrix.distro-slug }}
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
include:
- distro-slug: macos-12
arch: x86_64
pkg-type: package
- distro-slug: macos-12
arch: x86_64
pkg-type: onedir
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Install System Dependencies
run: |
brew install tree
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
update-environment: true
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ matrix.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- ${{ matrix.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "1"
PRINT_TEST_PLAN_ONLY: "1"
PRINT_SYSTEM_INFO: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs
- name: Run Package Download Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
COVERAGE_CONTEXT: ${{ matrix.distro-slug }}
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# it from showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/macos/codecov
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \
# --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ matrix.distro-slug }},pkg \
--name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \
# --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ matrix.distro-slug }},pkg \
--name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always()
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fail
if: always() && job.status != 'cancelled'
with:
check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true
windows:
name: Windows
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
include:
- distro-slug: windows-2022
arch: amd64
pkg-type: nsis
- distro-slug: windows-2022
arch: amd64
pkg-type: msi
- distro-slug: windows-2022
arch: amd64
pkg-type: onedir
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
- name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ matrix.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ matrix.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }}
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs
- name: Run Package Download Tests
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
tools --timestamps vm combine-coverage ${{ matrix.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ matrix.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# it from showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Install Codecov CLI
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
# We can't yet use tokenless uploads with the codecov CLI
# python3 -m pip install codecov-cli
#
curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import
curl -Os https://uploader.codecov.io/latest/linux/codecov
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig
gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM
shasum -a 256 -c codecov.SHA256SUM
chmod +x codecov
- name: Upload Source Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/salt.xml ]; then
echo "The artifacts/coverage/salt.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/salt.xml \
# --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \
# --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/salt.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags salt,${{ matrix.distro-slug }},pkg \
--name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Tests Code Coverage To Codecov
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
run: |
if [ ! -s artifacts/coverage/tests.xml ]; then
echo "The artifacts/coverage/tests.xml file does not exist"
exit 1
fi
# We can't yet use tokenless uploads with the codecov CLI
#codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
# do-upload --git-service github --sha ${{ github.sha }} \
# --file artifacts/coverage/tests.xml \
# --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \
# --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs
n=0
until [ "$n" -ge 5 ]
do
if ./codecov --file artifacts/coverage/tests.xml \
--sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \
--flags tests,${{ matrix.distro-slug }},pkg \
--name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then
rc=$?
break
fi
rc=$?
n=$((n+1))
sleep 15
done
if [ "$rc" -ne 0 ]; then
echo "Failed to upload codecov stats"
exit 1
fi
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fail
if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success'
with:
check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }} ${{ matrix.pkg-type }} )
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -49,7 +49,7 @@ on:
required: false
type: string
description: The nox session to run
default: test-pkgs-onedir
default: ci-test-onedir
skip-code-coverage:
required: false
type: boolean
@ -61,24 +61,27 @@ on:
description: Skip Publishing JUnit Reports
default: false
env:
COLUMNS: 160
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
jobs:
generate-matrix:
name: Generate Package Test Matrix
name: Generate Matrix
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
outputs:
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -88,88 +91,12 @@ jobs:
run: |
tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
dependencies:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 90
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
- name: Install System Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
brew install openssl@3
- name: Install Nox
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
run: |
export PYCURL_SSL_LIBRARY=openssl
export LDFLAGS="-L/usr/local/opt/openssl@3/lib"
export CPPFLAGS="-I/usr/local/opt/openssl@3/include"
export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig"
nox --force-color --install-only -e ${{ inputs.nox-session }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox --force-color -e "pre-archive-cleanup(pkg=False)"
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- dependencies
- generate-matrix
strategy:
fail-fast: false
@ -177,8 +104,14 @@ jobs:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v3
@ -220,7 +153,9 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
@ -237,7 +172,7 @@ jobs:
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.test-chunk }} \
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.test-chunk }} \
${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}}
- name: Run Package Tests
@ -251,7 +186,7 @@ jobs:
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.test-chunk }} \
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.test-chunk }} \
${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}}
- name: Fix file ownership
@ -260,7 +195,7 @@ jobs:
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# it from showing in the tree command below
@ -268,7 +203,7 @@ jobs:
tree -a artifacts
- name: Upload Test Run Artifacts
if: always() && job.status != 'cancelled'
if: always()
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }}
@ -278,7 +213,7 @@ jobs:
!artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.test-chunk }})
name: Report
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
@ -291,7 +226,7 @@ jobs:
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Download Test Run Artifacts
id: download-test-run-artifacts

View file

@ -35,6 +35,11 @@ on:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
package-name:
required: false
type: string
@ -44,7 +49,7 @@ on:
required: false
type: string
description: The nox session to run
default: test-pkgs-onedir
default: ci-test-onedir
skip-code-coverage:
required: false
type: boolean
@ -56,9 +61,8 @@ on:
description: Skip Publishing JUnit Reports
default: false
env:
COLUMNS: 160
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
@ -67,7 +71,7 @@ env:
jobs:
generate-matrix:
name: Generate Package Test Matrix
name: Generate Matrix
runs-on:
- self-hosted
- linux
@ -75,8 +79,14 @@ jobs:
outputs:
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -86,107 +96,6 @@ jobs:
run: |
tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
dependencies:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 90
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Define Nox Upload Artifact Name
id: nox-artifact-name
run: |
if [ "${{ contains(inputs.distro-slug, 'windows') }}" != "true" ]; then
echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}"
else
echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}"
fi
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: ${{ steps.nox-artifact-name.outputs.name }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
name: Test
@ -197,15 +106,20 @@ jobs:
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- generate-matrix
- dependencies
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v3
@ -234,7 +148,9 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{
hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py')
}}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
@ -264,16 +180,22 @@ jobs:
run: |
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Downgrade importlib-metadata
if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7", "debian-10"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.test-chunk) }}
run: |
# This step can go away once we stop testing classic packages upgrade/downgrades to/from 3005.x
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- "sudo python3 -m pip install -U 'importlib-metadata<=4.13.0' 'virtualenv<=20.21.1'"
- name: Show System Info & Test Plan
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \
${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}}
- name: Run Package Tests
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \
${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}}
- name: Download Test Run Artifacts
@ -302,7 +224,7 @@ jobs:
!artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.test-chunk }})
name: Report
runs-on:
- self-hosted
- linux
@ -318,7 +240,7 @@ jobs:
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Download Test Run Artifacts
id: download-test-run-artifacts

View file

@ -25,7 +25,7 @@ jobs:
runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4

View file

@ -103,7 +103,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=linux
- --include=requirements/base.txt
@ -119,7 +118,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=linux
- --include=requirements/base.txt
@ -135,7 +133,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=linux
- --include=requirements/base.txt
@ -150,6 +147,7 @@ repos:
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.11
- --platform=linux
- --include=requirements/base.txt
@ -165,7 +163,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=freebsd
- --include=requirements/base.txt
@ -181,7 +178,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=freebsd
- --include=requirements/base.txt
@ -197,7 +193,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=freebsd
- --include=requirements/base.txt
@ -212,10 +207,12 @@ repos:
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.11
- --platform=freebsd
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --no-emit-index-url
- requirements/static/pkg/freebsd.in
- id: pip-tools-compile
@ -226,7 +223,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=darwin
- --include=requirements/darwin.txt
@ -241,7 +237,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=darwin
- --include=requirements/darwin.txt
@ -255,6 +250,7 @@ repos:
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.11
- --platform=darwin
- --include=requirements/darwin.txt
@ -269,7 +265,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=windows
- --include=requirements/windows.txt
@ -284,7 +279,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=windows
- --include=requirements/windows.txt
@ -299,7 +293,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=windows
- --include=requirements/windows.txt
@ -313,9 +306,11 @@ repos:
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.11
- --platform=windows
- --include=requirements/windows.txt
- --no-emit-index-url
- requirements/static/pkg/windows.in
# <---- Packaging Requirements -------------------------------------------------------------------------------------
@ -324,12 +319,11 @@ repos:
- id: pip-tools-compile
alias: compile-ci-linux-3.8-zmq-requirements
name: Linux CI Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in|py3\.8/linux\.txt)))$
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.8/linux\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=linux
- --include=requirements/base.txt
@ -343,12 +337,11 @@ repos:
- id: pip-tools-compile
alias: compile-ci-linux-3.9-zmq-requirements
name: Linux CI Py3.9 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in|py3\.9/linux\.txt)))$
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.9/linux\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=linux
- --include=requirements/base.txt
@ -362,12 +355,11 @@ repos:
- id: pip-tools-compile
alias: compile-ci-linux-3.10-zmq-requirements
name: Linux CI Py3.10 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in|py3\.10/linux\.txt)))$
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.10/linux\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=linux
- --include=requirements/base.txt
@ -381,12 +373,11 @@ repos:
- id: pip-tools-compile
alias: compile-ci-linux-3.11-zmq-requirements
name: Linux CI Py3.11 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in|py3\.11/linux\.txt)))$
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.11/linux\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=linux
- --include=requirements/base.txt
@ -405,7 +396,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=linux
- --out-prefix=linux
@ -420,7 +410,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=linux
- --out-prefix=linux
@ -430,13 +419,11 @@ repos:
- id: pip-tools-compile
alias: compile-ci-linux-crypto-3.10-requirements
name: Linux CI Py3.10 Crypto Requirements
files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$
files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.10/linux-crypto\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=linux
- --out-prefix=linux
@ -446,13 +433,11 @@ repos:
- id: pip-tools-compile
alias: compile-ci-linux-crypto-3.11-requirements
name: Linux CI Py3.11 Crypto Requirements
files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$
files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/linux-crypto\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=linux
- --out-prefix=linux
@ -467,7 +452,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=freebsd
- --include=requirements/base.txt
@ -486,7 +470,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=freebsd
- --include=requirements/base.txt
@ -505,7 +488,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=freebsd
- --include=requirements/base.txt
@ -524,7 +506,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=freebsd
- --include=requirements/base.txt
@ -543,7 +524,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=freebsd
- --out-prefix=freebsd
@ -558,7 +538,7 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --build-isolation
- --py-version=3.9
- --platform=freebsd
- --out-prefix=freebsd
@ -574,7 +554,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=freebsd
- --out-prefix=freebsd
@ -590,10 +569,10 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=freebsd
- --out-prefix=freebsd
- --no-emit-index-url
- requirements/static/ci/crypto.in
- id: pip-tools-compile
@ -604,7 +583,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=darwin
- --include=requirements/darwin.txt
@ -622,7 +600,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=darwin
- --include=requirements/darwin.txt
@ -640,13 +617,13 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=darwin
- --include=requirements/darwin.txt
- --include=requirements/pytest.txt
- --include=requirements/static/pkg/darwin.in
- --include=requirements/static/ci/common.in
- --no-emit-index-url
- requirements/static/ci/darwin.in
- id: pip-tools-compile
@ -657,7 +634,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=darwin
- --out-prefix=darwin
@ -672,7 +648,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=darwin
- --out-prefix=darwin
@ -687,7 +662,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=darwin
- --out-prefix=darwin
@ -702,7 +676,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=windows
- --include=requirements/windows.txt
@ -720,7 +693,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=windows
- --include=requirements/windows.txt
@ -738,7 +710,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=windows
- --include=requirements/windows.txt
@ -756,7 +727,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=windows
- --include=requirements/windows.txt
@ -774,7 +744,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=windows
- --out-prefix=windows
@ -789,7 +758,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=windows
- --out-prefix=windows
@ -804,7 +772,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=windows
- --out-prefix=windows
@ -819,10 +786,10 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=windows
- --out-prefix=windows
- --no-emit-index-url
- requirements/static/ci/crypto.in
# <---- CI Requirements --------------------------------------------------------------------------------------------
@ -837,7 +804,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --no-emit-index-url
- requirements/static/ci/cloud.in
@ -850,7 +816,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --no-emit-index-url
- requirements/static/ci/cloud.in
@ -863,7 +828,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --no-emit-index-url
- requirements/static/ci/cloud.in
@ -876,7 +840,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --no-emit-index-url
- requirements/static/ci/cloud.in
@ -891,7 +854,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=linux
- --no-emit-index-url
@ -905,7 +867,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=linux
- --no-emit-index-url
@ -919,7 +880,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=linux
- --no-emit-index-url
@ -933,8 +893,8 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --no-emit-index-url
- --platform=linux
- requirements/static/ci/docs.in
# <---- Doc CI Requirements ----------------------------------------------------------------------------------------
@ -948,7 +908,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=linux
- --no-emit-index-url
@ -962,7 +921,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=linux
- --no-emit-index-url
@ -976,7 +934,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=linux
- --no-emit-index-url
@ -990,7 +947,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=linux
- --no-emit-index-url
@ -1007,7 +963,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --platform=linux
- --no-emit-index-url
@ -1021,7 +976,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --platform=linux
- --no-emit-index-url
@ -1035,7 +989,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --platform=linux
- --no-emit-index-url
@ -1049,7 +1002,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --platform=linux
- --no-emit-index-url
@ -1065,7 +1017,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.8
- --no-emit-index-url
- requirements/static/ci/invoke.in
@ -1078,7 +1029,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --no-emit-index-url
- requirements/static/ci/invoke.in
@ -1091,68 +1041,11 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --no-emit-index-url
- requirements/static/ci/invoke.in
# <---- Invoke -----------------------------------------------------------------------------------------------------
# <---- PKG ci requirements-----------------------------------------------------------------------------------------
- id: pip-tools-compile
alias: compile-ci-pkg-3.10-requirements
name: PKG tests CI Py3.10 Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.10/pkgtests\.in)))$
pass_filenames: false
args:
- -v
- --py-version=3.10
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- requirements/static/ci/pkgtests.in
- id: pip-tools-compile
alias: compile-ci-windows-pkg-3.10-requirements
name: PKG tests Windows CI Py3.10 Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests-windows|common)\.in|py3\.10/pkgtests-windows\.in)))$
pass_filenames: false
args:
- -v
- --py-version=3.10
- --platform=windows
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- requirements/static/ci/pkgtests-windows.in
- id: pip-tools-compile
alias: compile-ci-pkg-3.11-requirements
name: PKG tests CI Py3.11 Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.11/pkgtests\.in)))$
pass_filenames: false
args:
- -v
- --py-version=3.11
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- requirements/static/ci/pkgtests.in
- id: pip-tools-compile
alias: compile-ci-windows-pkg-3.11-requirements
name: PKG tests Windows CI Py3.11 Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests-windows|common)\.in|py3\.11/pkgtests-windows\.in)))$
pass_filenames: false
args:
- -v
- --py-version=3.11
- --platform=windows
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- requirements/static/ci/pkgtests-windows.in
# <---- PKG ci requirements-----------------------------------------------------------------------------------------
# ----- Tools ---------------------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-tools-3.9-requirements
@ -1162,7 +1055,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.9
- --no-emit-index-url
- requirements/static/ci/tools.in
@ -1175,7 +1067,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.10
- --no-emit-index-url
- requirements/static/ci/tools.in
@ -1188,7 +1079,6 @@ repos:
args:
- -v
- --build-isolation
# - --resolver=backtracking
- --py-version=3.11
- --no-emit-index-url
- requirements/static/ci/tools.in

1
changelog/38098.fixed.md Normal file
View file

@ -0,0 +1 @@
Improved error message when state arguments are accidentally passed as a string

1
changelog/64223.added.md Normal file
View file

@ -0,0 +1 @@
Added Salt support for Debian 12

1
changelog/65302.fixed.md Normal file
View file

@ -0,0 +1 @@
Ensure that the correct value of jid_inclue is passed if the argument is included in the passed keyword arguments.

View file

@ -1 +1 @@
centosstream-9-x86_64: ami-0dfa940714a95b497
centosstream-9-x86_64: ami-09b72b340acb62c73

View file

@ -1,8 +1,8 @@
{
"almalinux-8-arm64": {
"ami": "ami-09017a2c26bb6cf37",
"ami": "ami-04c86a9990a3836b9",
"ami_description": "CI Image of AlmaLinux 8 arm64",
"ami_name": "salt-project/ci/almalinux/8/arm64/20230912.1532",
"ami_name": "salt-project/ci/almalinux/8/arm64/20231005.1556",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -10,9 +10,9 @@
"ssh_username": "ec2-user"
},
"almalinux-8": {
"ami": "ami-0da7449d7f17dca6d",
"ami": "ami-059ed5c00c02c564b",
"ami_description": "CI Image of AlmaLinux 8 x86_64",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20230912.1532",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20231005.1557",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -20,9 +20,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9-arm64": {
"ami": "ami-0b45894ce343176b0",
"ami": "ami-0213f3e31656f7393",
"ami_description": "CI Image of AlmaLinux 9 arm64",
"ami_name": "salt-project/ci/almalinux/9/arm64/20230912.1532",
"ami_name": "salt-project/ci/almalinux/9/arm64/20231005.1557",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -30,9 +30,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9": {
"ami": "ami-0c8a554820c140d45",
"ami": "ami-0c4e36d63e728ee21",
"ami_description": "CI Image of AlmaLinux 9 x86_64",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20230912.1532",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20231005.1557",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -40,9 +40,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2-arm64": {
"ami": "ami-0aac44852e96fb156",
"ami": "ami-010d24ab23bfb0330",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20230912.1532",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20231005.1614",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -50,9 +50,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-04bf06c280f2957e0",
"ami": "ami-0ad016fe17f923c6b",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230912.1532",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20231005.1614",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -60,9 +60,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023.1-arm64": {
"ami": "ami-0430562e1dc073734",
"ami": "ami-037b7d6177ec8259d",
"ami_description": "CI Image of AmazonLinux 2023.1 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023.1/arm64/20230912.1532",
"ami_name": "salt-project/ci/amazonlinux/2023.1/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -70,9 +70,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023.1": {
"ami": "ami-09b7ab6b9bb5ceca3",
"ami": "ami-08e04f6dd44c858fa",
"ami_description": "CI Image of AmazonLinux 2023.1 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023.1/x86_64/20230912.1532",
"ami_name": "salt-project/ci/amazonlinux/2023.1/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -80,9 +80,9 @@
"ssh_username": "ec2-user"
},
"archlinux-lts": {
"ami": "ami-08ecb254b10e24bca",
"ami": "ami-0b88ddfb321aff9ba",
"ami_description": "CI Image of ArchLinux lts x86_64",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20230912.1532",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "false",
"instance_type": "t3a.large",
@ -90,9 +90,9 @@
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-0facb0416e994c2d4",
"ami": "ami-01d5ee66081a02154",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20230912.1553",
"ami_name": "salt-project/ci/centos/7/arm64/20231005.1617",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -100,9 +100,9 @@
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-04e695ebbac38868e",
"ami": "ami-020fcff1da1f72f27",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20230912.1553",
"ami_name": "salt-project/ci/centos/7/x86_64/20231005.1616",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -110,9 +110,9 @@
"ssh_username": "centos"
},
"centosstream-8-arm64": {
"ami": "ami-02b1e24269822d3fc",
"ami": "ami-0ac6238b6506f7b8f",
"ami_description": "CI Image of CentOSStream 8 arm64",
"ami_name": "salt-project/ci/centosstream/8/arm64/20230912.1532",
"ami_name": "salt-project/ci/centosstream/8/arm64/20231005.1614",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -120,9 +120,9 @@
"ssh_username": "centos"
},
"centosstream-8": {
"ami": "ami-0706ab643607236c0",
"ami": "ami-0bfceb03d43d0ba0e",
"ami_description": "CI Image of CentOSStream 8 x86_64",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20230912.1532",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -130,9 +130,9 @@
"ssh_username": "centos"
},
"centosstream-9-arm64": {
"ami": "ami-032abe3ace927c296",
"ami": "ami-04db23ba9082a01bf",
"ami_description": "CI Image of CentOSStream 9 arm64",
"ami_name": "salt-project/ci/centosstream/9/arm64/20230912.1532",
"ami_name": "salt-project/ci/centosstream/9/arm64/20231005.1615",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -140,9 +140,9 @@
"ssh_username": "ec2-user"
},
"centosstream-9": {
"ami": "ami-091986d83f4c0bdd7",
"ami": "ami-0a47f4f785cb7a81c",
"ami_description": "CI Image of CentOSStream 9 x86_64",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20230912.1532",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -150,9 +150,9 @@
"ssh_username": "ec2-user"
},
"debian-10-arm64": {
"ami": "ami-08b7b9fb74d7c58f2",
"ami": "ami-015d30e48fa213528",
"ami_description": "CI Image of Debian 10 arm64",
"ami_name": "salt-project/ci/debian/10/arm64/20230912.1546",
"ami_name": "salt-project/ci/debian/10/arm64/20231005.1601",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -160,9 +160,9 @@
"ssh_username": "admin"
},
"debian-10": {
"ami": "ami-0002ea04be195948e",
"ami": "ami-0397043698fedfa4c",
"ami_description": "CI Image of Debian 10 x86_64",
"ami_name": "salt-project/ci/debian/10/x86_64/20230912.1548",
"ami_name": "salt-project/ci/debian/10/x86_64/20231005.1606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -170,9 +170,9 @@
"ssh_username": "admin"
},
"debian-11-arm64": {
"ami": "ami-0e14ec1b2a5553f96",
"ami": "ami-008dbab5525972174",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20230912.1548",
"ami_name": "salt-project/ci/debian/11/arm64/20231005.1607",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -180,9 +180,9 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-06c5ea0d19a5773d7",
"ami": "ami-04fc56501daaf3c94",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20230912.1549",
"ami_name": "salt-project/ci/debian/11/x86_64/20231005.1607",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -190,9 +190,9 @@
"ssh_username": "admin"
},
"debian-12-arm64": {
"ami": "ami-055b0a6d5bb3e9ecd",
"ami": "ami-0956b73228a7368c3",
"ami_description": "CI Image of Debian 12 arm64",
"ami_name": "salt-project/ci/debian/12/arm64/20230912.1550",
"ami_name": "salt-project/ci/debian/12/arm64/20231005.1610",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -200,9 +200,9 @@
"ssh_username": "admin"
},
"debian-12": {
"ami": "ami-0eada119571a913fd",
"ami": "ami-0d0aa04bb5c49e54f",
"ami_description": "CI Image of Debian 12 x86_64",
"ami_name": "salt-project/ci/debian/12/x86_64/20230912.1550",
"ami_name": "salt-project/ci/debian/12/x86_64/20231005.1613",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -210,9 +210,9 @@
"ssh_username": "admin"
},
"fedora-37-arm64": {
"ami": "ami-02b916b21581ead5a",
"ami": "ami-0201f64fda9f1ca6d",
"ami_description": "CI Image of Fedora 37 arm64",
"ami_name": "salt-project/ci/fedora/37/arm64/20230912.1533",
"ami_name": "salt-project/ci/fedora/37/arm64/20231005.1617",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -220,9 +220,9 @@
"ssh_username": "fedora"
},
"fedora-37": {
"ami": "ami-01e0becc3552ad2f6",
"ami": "ami-02dfc80c8b14fd5bc",
"ami_description": "CI Image of Fedora 37 x86_64",
"ami_name": "salt-project/ci/fedora/37/x86_64/20230912.1533",
"ami_name": "salt-project/ci/fedora/37/x86_64/20231005.1618",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -230,9 +230,9 @@
"ssh_username": "fedora"
},
"fedora-38-arm64": {
"ami": "ami-01f4a3bdee88da9f3",
"ami": "ami-0b03c270c7f50165d",
"ami_description": "CI Image of Fedora 38 arm64",
"ami_name": "salt-project/ci/fedora/38/arm64/20230912.1533",
"ami_name": "salt-project/ci/fedora/38/arm64/20231005.1618",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -240,9 +240,9 @@
"ssh_username": "fedora"
},
"fedora-38": {
"ami": "ami-0c38a1907f5b55077",
"ami": "ami-0927a80620f670c23",
"ami_description": "CI Image of Fedora 38 x86_64",
"ami_name": "salt-project/ci/fedora/38/x86_64/20230912.1543",
"ami_name": "salt-project/ci/fedora/38/x86_64/20231005.1626",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -250,9 +250,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-0ecfb817deee506a9",
"ami": "ami-0b51e3479fabb4078",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20230912.1533",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20231005.1614",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -260,9 +260,9 @@
"ssh_username": "ec2-user"
},
"photonos-3-arm64": {
"ami": "ami-0383031c08217b13e",
"ami": "ami-0a33037524874686c",
"ami_description": "CI Image of PhotonOS 3 arm64",
"ami_name": "salt-project/ci/photonos/3/arm64/20230924.0913",
"ami_name": "salt-project/ci/photonos/3/arm64/20231005.1558",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -270,9 +270,9 @@
"ssh_username": "root"
},
"photonos-3": {
"ami": "ami-06004a7d856e94355",
"ami": "ami-068c5c07aa91d84d1",
"ami_description": "CI Image of PhotonOS 3 x86_64",
"ami_name": "salt-project/ci/photonos/3/x86_64/20230924.0913",
"ami_name": "salt-project/ci/photonos/3/x86_64/20231005.1558",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -280,9 +280,9 @@
"ssh_username": "root"
},
"photonos-4-arm64": {
"ami": "ami-08808bcf97f824036",
"ami": "ami-0f8c72854c5b5679c",
"ami_description": "CI Image of PhotonOS 4 arm64",
"ami_name": "salt-project/ci/photonos/4/arm64/20230924.0924",
"ami_name": "salt-project/ci/photonos/4/arm64/20231005.1558",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -290,9 +290,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-0fd7a6ed4c61ee312",
"ami": "ami-04b8974b830b5adb0",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20230924.0925",
"ami_name": "salt-project/ci/photonos/4/x86_64/20231005.1559",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -300,9 +300,9 @@
"ssh_username": "root"
},
"photonos-5-arm64": {
"ami": "ami-0c85ad72d107ec4b8",
"ami": "ami-0f466b198cbcaf380",
"ami_description": "CI Image of PhotonOS 5 arm64",
"ami_name": "salt-project/ci/photonos/5/arm64/20230924.0927",
"ami_name": "salt-project/ci/photonos/5/arm64/20231005.1559",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -310,9 +310,9 @@
"ssh_username": "root"
},
"photonos-5": {
"ami": "ami-0732ce03b2ab6fad2",
"ami": "ami-01bb09f84464b243e",
"ami_description": "CI Image of PhotonOS 5 x86_64",
"ami_name": "salt-project/ci/photonos/5/x86_64/20230924.0927",
"ami_name": "salt-project/ci/photonos/5/x86_64/20231005.1601",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -320,9 +320,9 @@
"ssh_username": "root"
},
"ubuntu-20.04-arm64": {
"ami": "ami-05aeb3d5bf0a16369",
"ami": "ami-06d9a9e3b5ae369c7",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230912.1551",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -330,9 +330,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-00cec0054fd71d281",
"ami": "ami-080a55fb6cb08134d",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230912.1550",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -340,9 +340,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-0f7dc3333620d58fd",
"ami": "ami-0c87b8f0b8794f32e",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230912.1551",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -350,9 +350,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-0bc7c1824a6b0752f",
"ami": "ami-0ce98043f227c9ac0",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230912.1552",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -360,9 +360,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04-arm64": {
"ami": "ami-0e8818777218efeeb",
"ami": "ami-0519c583e36309fef",
"ami_description": "CI Image of Ubuntu 23.04 arm64",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20230912.1552",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20231005.1555",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -370,9 +370,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-23.04": {
"ami": "ami-0813a38bf6a6cf4de",
"ami": "ami-063ad5dfb49f09182",
"ami_description": "CI Image of Ubuntu 23.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20230912.1552",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20231005.1555",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -380,9 +380,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-099db55543619f54a",
"ami": "ami-0f1ac34593b8b044f",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20230522.0606",
"ami_name": "salt-project/ci/windows/2016/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -390,9 +390,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-0860ee5bc9ee93e13",
"ami": "ami-09100ff6a103a28ab",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20230522.0606",
"ami_name": "salt-project/ci/windows/2019/x86_64/20231005.1615",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -400,9 +400,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-032e3abce2aa98da7",
"ami": "ami-0266dc6a12bc9fca6",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20230522.0606",
"ami_name": "salt-project/ci/windows/2022/x86_64/20231005.1616",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",

View file

@ -1,5 +1,5 @@
nox_version: "2022.8.7"
python_version: "3.10.13"
relenv_version: "0.13.10"
relenv_version: "0.13.11"
release-branches:
- "3006.x"

View file

@ -6,14 +6,15 @@ Nox configuration script
"""
# pylint: disable=resource-leakage,3rd-party-module-not-gated
import contextlib
import datetime
import glob
import gzip
import json
import os
import pathlib
import shutil
import sqlite3
import subprocess
import sys
import tarfile
import tempfile
@ -190,21 +191,12 @@ def _get_pydir(session):
return "py{}.{}".format(*version_info)
def _get_pip_requirements_file(session, transport, crypto=None, requirements_type="ci"):
def _get_pip_requirements_file(session, crypto=None, requirements_type="ci"):
assert requirements_type in ("ci", "pkg")
pydir = _get_pydir(session)
if IS_WINDOWS:
if crypto is None:
_requirements_file = os.path.join(
"requirements",
"static",
requirements_type,
pydir,
f"{transport}-windows.txt",
)
if os.path.exists(_requirements_file):
return _requirements_file
_requirements_file = os.path.join(
"requirements", "static", requirements_type, pydir, "windows.txt"
)
@ -218,15 +210,6 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ
session.error(f"Could not find a windows requirements file for {pydir}")
elif IS_DARWIN:
if crypto is None:
_requirements_file = os.path.join(
"requirements",
"static",
requirements_type,
pydir,
f"{transport}-darwin.txt",
)
if os.path.exists(_requirements_file):
return _requirements_file
_requirements_file = os.path.join(
"requirements", "static", requirements_type, pydir, "darwin.txt"
)
@ -240,15 +223,6 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ
session.error(f"Could not find a darwin requirements file for {pydir}")
elif IS_FREEBSD:
if crypto is None:
_requirements_file = os.path.join(
"requirements",
"static",
requirements_type,
pydir,
f"{transport}-freebsd.txt",
)
if os.path.exists(_requirements_file):
return _requirements_file
_requirements_file = os.path.join(
"requirements", "static", requirements_type, pydir, "freebsd.txt"
)
@ -262,15 +236,6 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ
session.error(f"Could not find a freebsd requirements file for {pydir}")
else:
if crypto is None:
_requirements_file = os.path.join(
"requirements",
"static",
requirements_type,
pydir,
f"{transport}-linux.txt",
)
if os.path.exists(_requirements_file):
return _requirements_file
_requirements_file = os.path.join(
"requirements", "static", requirements_type, pydir, "linux.txt"
)
@ -319,7 +284,6 @@ def _upgrade_pip_setuptools_and_wheel(session, upgrade=True, onedir=False):
def _install_requirements(
session,
transport,
*extra_requirements,
requirements_type="ci",
onedir=False,
@ -332,7 +296,7 @@ def _install_requirements(
# Install requirements
requirements_file = _get_pip_requirements_file(
session, transport, requirements_type=requirements_type
session, requirements_type=requirements_type
)
install_command = ["--progress-bar=off", "-r", requirements_file]
session.install(*install_command, silent=PIP_INSTALL_SILENT)
@ -361,21 +325,32 @@ def _install_coverage_requirement(session):
if SKIP_REQUIREMENTS_INSTALL is False:
coverage_requirement = COVERAGE_REQUIREMENT
if coverage_requirement is None:
coverage_requirement = "coverage==5.2"
coverage_requirement = "coverage==7.3.1"
if IS_LINUX:
distro_slug = os.environ.get("TOOLS_DISTRO_SLUG")
if distro_slug is not None and distro_slug in (
"centos-7",
"debian-10",
"photonos-3",
):
# Keep the old coverage requirement version since the new one, on these
# plaforms turns the test suite quite slow.
# Unit tests don't finish before the 5 hours timeout when they should
# finish within 1 to 2 hours.
coverage_requirement = "coverage==5.5"
session.install(
"--progress-bar=off", coverage_requirement, silent=PIP_INSTALL_SILENT
)
def _run_with_coverage(session, *test_cmd, env=None):
def _run_with_coverage(session, *test_cmd, env=None, on_rerun=False):
_install_coverage_requirement(session)
session.run("coverage", "erase")
if on_rerun is False:
session.run("coverage", "erase")
if env is None:
env = {}
coverage_base_env = {}
sitecustomize_dir = session.run(
"salt-factories", "--coverage", silent=True, log=True, stderr=None
)
@ -407,78 +382,36 @@ def _run_with_coverage(session, *test_cmd, env=None):
python_path_entries.insert(0, str(sitecustomize_dir))
python_path_env_var = os.pathsep.join(python_path_entries)
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
coverage_base_env["COVERAGE_FILE"] = COVERAGE_FILE
env.update(
{
# The updated python path so that sitecustomize is importable
"PYTHONPATH": python_path_env_var,
# Instruct sub processes to also run under coverage
"COVERAGE_PROCESS_START": str(REPO_ROOT / ".coveragerc"),
},
**coverage_base_env,
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": COVERAGE_FILE,
}
)
try:
session.run(*test_cmd, env=env)
finally:
if os.environ.get("GITHUB_ACTIONS_PIPELINE", "0") == "0":
# Always combine and generate the XML coverage report
try:
session.run(
"coverage", "combine", "--debug=pathmap", env=coverage_base_env
)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
# Generate report for tests code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_OUTPUT_DIR.joinpath("tests.xml").relative_to(REPO_ROOT)),
"--omit=salt/*",
"--include=tests/*",
env=coverage_base_env,
)
# Generate report for salt code coverage
session.run(
"coverage",
"xml",
"-o",
str(COVERAGE_OUTPUT_DIR.joinpath("salt.xml").relative_to(REPO_ROOT)),
"--omit=tests/*",
"--include=salt/*",
env=coverage_base_env,
)
# Generate html report for tests code coverage
session.run(
"coverage",
"html",
"-d",
str(COVERAGE_OUTPUT_DIR.joinpath("html").relative_to(REPO_ROOT)),
"--omit=salt/*",
"--include=tests/*",
env=coverage_base_env,
)
# Generate html report for salt code coverage
session.run(
"coverage",
"html",
"-d",
str(COVERAGE_OUTPUT_DIR.joinpath("html").relative_to(REPO_ROOT)),
"--omit=tests/*",
"--include=salt/*",
env=coverage_base_env,
)
session.run(*test_cmd, env=env)
def _report_coverage(session):
def _report_coverage(
session,
combine=True,
cli_report=True,
html_report=False,
xml_report=False,
json_report=False,
):
_install_coverage_requirement(session)
if not any([combine, cli_report, html_report, xml_report, json_report]):
session.error(
"At least one of combine, cli_report, html_report, xml_report, json_report needs to be True"
)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
@ -489,72 +422,117 @@ def _report_coverage(session):
if session.posargs:
report_section = session.posargs.pop(0)
if report_section not in ("salt", "tests"):
session.error("The report section can only be one of 'salt', 'tests'.")
session.error(
f"The report section can only be one of 'salt', 'tests', not: {report_section}"
)
if session.posargs:
session.error(
"Only one argument can be passed to the session, which is optional "
"and is one of 'salt', 'tests'."
)
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine", env=env)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
if combine is True:
coverage_db_files = glob.glob(f"{COVERAGE_FILE}.*")
if coverage_db_files:
with contextlib.suppress(CommandFailed):
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
session.run("coverage", "combine", env=env)
elif os.path.exists(COVERAGE_FILE):
session_warn(session, "Coverage files already combined.")
if not IS_WINDOWS:
# The coverage file might have come from a windows machine, fix paths
with sqlite3.connect(COVERAGE_FILE) as db:
res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'")
if res.fetchone():
session_warn(
session,
"Replacing backwards slashes with forward slashes on file "
"paths in the coverage database",
)
db.execute(r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');")
if os.path.exists(COVERAGE_FILE) and not IS_WINDOWS:
# Some coverage files might have come from a windows machine, fix paths
with sqlite3.connect(COVERAGE_FILE) as db:
res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'")
if res.fetchone():
session_warn(
session,
"Replacing backwards slashes with forward slashes on file "
"paths in the coverage database",
)
db.execute(
r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');"
)
if not os.path.exists(COVERAGE_FILE):
session.error("No coverage files found.")
if report_section == "salt":
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-salt.json"
)
json_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "salt.json"
xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "salt.xml"
html_coverage_dir = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "salt"
cmd_args = [
"--omit=tests/*",
"--omit=tests/*,pkg/tests/*",
"--include=salt/*",
]
elif report_section == "tests":
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-tests.json"
json_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "tests.json"
xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "tests.xml"
html_coverage_dir = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "tests"
)
cmd_args = [
"--omit=salt/*",
"--include=tests/*",
"--include=tests/*,pkg/tests/*",
]
else:
json_coverage_file = (
COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.json"
)
xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.xml"
html_coverage_dir = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "full"
cmd_args = [
"--include=salt/*,tests/*",
"--include=salt/*,tests/*,pkg/tests/*",
]
session.run(
"coverage",
"json",
"-o",
str(json_coverage_file),
*cmd_args,
env=env,
)
session.run(
"coverage",
"report",
*cmd_args,
env=env,
)
if cli_report:
session.run(
"coverage",
"report",
"--precision=2",
*cmd_args,
env=env,
)
if html_report:
session.run(
"coverage",
"html",
"-d",
str(html_coverage_dir),
"--show-contexts",
"--precision=2",
*cmd_args,
env=env,
)
if xml_report:
try:
session.run(
"coverage",
"xml",
"-o",
str(xml_coverage_file),
*cmd_args,
env=env,
)
except CommandFailed:
session_warn(
session, "Failed to generate the source XML code coverage report"
)
if json_report:
session.run(
"coverage",
"json",
"-o",
str(json_coverage_file),
"--show-contexts",
*cmd_args,
env=env,
)
@nox.session(python=_PYTHON_VERSIONS, name="test-parametrized")
@ -566,7 +544,7 @@ def test_parametrized(session, coverage, transport, crypto):
DO NOT CALL THIS NOX SESSION DIRECTLY
"""
# Install requirements
if _install_requirements(session, transport):
if _install_requirements(session):
if crypto:
session_run_always(
@ -583,7 +561,7 @@ def test_parametrized(session, coverage, transport, crypto):
install_command = [
"--progress-bar=off",
"--constraint",
_get_pip_requirements_file(session, transport, crypto=True),
_get_pip_requirements_file(session, crypto=True),
]
install_command.append(crypto)
session.install(*install_command, silent=PIP_INSTALL_SILENT)
@ -991,7 +969,7 @@ def test_tornado(session, coverage):
"""
# Install requirements
if _upgrade_pip_setuptools_and_wheel(session):
_install_requirements(session, "zeromq")
_install_requirements(session)
session.install(
"--progress-bar=off", "tornado==5.0.2", silent=PIP_INSTALL_SILENT
)
@ -1020,7 +998,7 @@ def pytest_tornado(session, coverage):
session.notify(session_name.replace("pytest-", "test-"))
def _pytest(session, coverage, cmd_args, env=None):
def _pytest(session, coverage, cmd_args, env=None, on_rerun=False):
# Create required artifacts directories
_create_ci_directories()
@ -1074,6 +1052,7 @@ def _pytest(session, coverage, cmd_args, env=None):
"pytest",
*args,
env=env,
on_rerun=on_rerun,
)
else:
session.run("python", "-m", "pytest", *args, env=env)
@ -1081,7 +1060,7 @@ def _pytest(session, coverage, cmd_args, env=None):
def _ci_test(session, transport, onedir=False):
# Install requirements
_install_requirements(session, transport, onedir=onedir)
_install_requirements(session, onedir=onedir)
env = {}
if onedir:
env["ONEDIR_TESTRUN"] = "1"
@ -1096,6 +1075,8 @@ def _ci_test(session, transport, onedir=False):
"scenarios": ["tests/pytests/scenarios"],
}
test_group_number = os.environ.get("TEST_GROUP") or "1"
if not session.posargs:
chunk_cmd = []
junit_report_filename = "test-results"
@ -1112,20 +1093,20 @@ def _ci_test(session, transport, onedir=False):
for values in chunks.values():
for value in values:
chunk_cmd.append(f"--ignore={value}")
junit_report_filename = f"test-results-{chunk}"
runtests_log_filename = f"runtests-{chunk}"
junit_report_filename = f"test-results-{chunk}-grp{test_group_number}"
runtests_log_filename = f"runtests-{chunk}-grp{test_group_number}"
else:
chunk_cmd = chunks[chunk]
junit_report_filename = f"test-results-{chunk}"
runtests_log_filename = f"runtests-{chunk}"
junit_report_filename = f"test-results-{chunk}-grp{test_group_number}"
runtests_log_filename = f"runtests-{chunk}-grp{test_group_number}"
if session.posargs:
if session.posargs[0] == "--":
session.posargs.pop(0)
chunk_cmd.extend(session.posargs)
else:
chunk_cmd = [chunk] + session.posargs
junit_report_filename = "test-results"
runtests_log_filename = "runtests"
junit_report_filename = f"test-results-grp{test_group_number}"
runtests_log_filename = f"runtests-grp{test_group_number}"
rerun_failures = os.environ.get("RERUN_FAILURES", "0") == "1"
track_code_coverage = os.environ.get("SKIP_CODE_COVERAGE", "0") == "0"
@ -1166,12 +1147,25 @@ def _ci_test(session, transport, onedir=False):
]
+ chunk_cmd
)
_pytest(session, coverage=track_code_coverage, cmd_args=pytest_args, env=env)
_pytest(
session,
coverage=track_code_coverage,
cmd_args=pytest_args,
env=env,
on_rerun=True,
)
@nox.session(python=_PYTHON_VERSIONS, name="ci-test")
def ci_test(session):
_ci_test(session, "zeromq")
transport = os.environ.get("SALT_TRANSPORT") or "zeromq"
valid_transports = ("zeromq", "tcp")
if transport not in valid_transports:
session.error(
"The value for the SALT_TRANSPORT environment variable can only be "
f"one of: {', '.join(valid_transports)}"
)
_ci_test(session, transport)
@nox.session(python=_PYTHON_VERSIONS, name="ci-test-tcp")
@ -1192,6 +1186,14 @@ def ci_test_onedir(session):
)
)
transport = os.environ.get("SALT_TRANSPORT") or "zeromq"
valid_transports = ("zeromq", "tcp")
if transport not in valid_transports:
session.error(
"The value for the SALT_TRANSPORT environment variable can only be "
f"one of: {', '.join(valid_transports)}"
)
_ci_test(session, "zeromq", onedir=True)
@ -1213,7 +1215,12 @@ def ci_test_onedir_tcp(session):
@nox.session(python="3", name="report-coverage")
def report_coverage(session):
_report_coverage(session)
_report_coverage(session, combine=True, cli_report=True)
@nox.session(python="3", name="coverage-report")
def coverage_report(session):
_report_coverage(session, combine=True, cli_report=True)
@nox.session(python=False, name="decompress-dependencies")
@ -1224,7 +1231,7 @@ def decompress_dependencies(session):
"Check cicd/images.yml for what's available."
)
distro_slug = session.posargs.pop(0)
if IS_WINDOWS:
if "windows" in distro_slug:
nox_dependencies_tarball = f"nox.{distro_slug}.tar.gz"
else:
nox_dependencies_tarball = f"nox.{distro_slug}.tar.xz"
@ -1241,7 +1248,7 @@ def decompress_dependencies(session):
session.log("Finding broken 'python' symlinks under '.nox/' ...")
for dirname in os.scandir(REPO_ROOT / ".nox"):
if not IS_WINDOWS:
if "windows" not in distro_slug:
scan_path = REPO_ROOT.joinpath(".nox", dirname, "bin")
else:
scan_path = REPO_ROOT.joinpath(".nox", dirname, "Scripts")
@ -1340,41 +1347,58 @@ def pre_archive_cleanup(session, pkg):
@nox.session(python="3", name="combine-coverage")
def combine_coverage(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
_report_coverage(session, combine=True, cli_report=False)
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine", env=env)
except CommandFailed:
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
@nox.session(
python=str(ONEDIR_PYTHON_PATH),
name="combine-coverage-onedir",
venv_params=["--system-site-packages"],
)
def combine_coverage_onedir(session):
_report_coverage(session, combine=True, cli_report=False)
@nox.session(python="3", name="create-html-coverage-report")
def create_html_coverage_report(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
_report_coverage(session, combine=True, cli_report=False, html_report=True)
# Generate html report for Salt and tests combined code coverage
session.run(
"coverage",
"html",
"-d",
str(COVERAGE_OUTPUT_DIR.joinpath("html").relative_to(REPO_ROOT)),
"--include=salt/*,tests/*",
"--show-contexts",
env=env,
)
def _create_xml_coverage_reports(session):
if session.posargs:
session.error("No arguments are acceptable to this nox session.")
session.posargs.append("salt")
_report_coverage(session, combine=True, cli_report=False, xml_report=True)
session.posargs.append("tests")
_report_coverage(session, combine=True, cli_report=False, xml_report=True)
@nox.session(python="3", name="create-xml-coverage-reports")
def create_xml_coverage_reports(session):
_create_xml_coverage_reports(session)
@nox.session(
python=str(ONEDIR_PYTHON_PATH),
name="create-xml-coverage-reports-onedir",
venv_params=["--system-site-packages"],
)
def create_xml_coverage_reports_onedir(session):
_create_xml_coverage_reports(session)
@nox.session(python="3", name="create-json-coverage-reports")
def create_json_coverage_reports(session):
_report_coverage(session, combine=True, cli_report=False, json_report=True)
@nox.session(
python=str(ONEDIR_PYTHON_PATH),
name="create-json-coverage-reports-onedir",
venv_params=["--system-site-packages"],
)
def create_json_coverage_reports_onedir(session):
_report_coverage(session, combine=True, cli_report=False, json_report=True)
class Tee:
@ -1656,7 +1680,7 @@ def invoke(session):
Run invoke tasks
"""
if _upgrade_pip_setuptools_and_wheel(session):
_install_requirements(session, "zeromq")
_install_requirements(session)
requirements_file = os.path.join(
"requirements", "static", "ci", _get_pydir(session), "invoke.txt"
)
@ -1820,10 +1844,25 @@ def build(session):
@nox.session(
python=str(ONEDIR_PYTHON_PATH),
name="test-pkgs-onedir",
name="ci-test-onedir-pkgs",
venv_params=["--system-site-packages"],
)
def test_pkgs_onedir(session):
def ci_test_onedir_pkgs(session):
from nox.virtualenv import VirtualEnv
session_warn(session, "Replacing VirtualEnv instance...")
ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir"
session._runner.venv = VirtualEnv(
str(ci_test_onedir_path.relative_to(REPO_ROOT)),
interpreter=session._runner.func.python,
reuse_existing=True,
venv=session._runner.venv.venv_or_virtualenv == "venv",
venv_params=session._runner.venv.venv_params,
)
os.environ["VIRTUAL_ENV"] = session._runner.venv.location
session._runner.venv.create()
if not ONEDIR_ARTIFACT_PATH.exists():
session.error(
"The salt onedir artifact, expected to be in '{}', was not found".format(
@ -1877,18 +1916,7 @@ def test_pkgs_onedir(session):
# Install requirements
if _upgrade_pip_setuptools_and_wheel(session, onedir=True):
if IS_WINDOWS:
file_name = "pkgtests-windows.txt"
else:
file_name = "pkgtests.txt"
requirements_file = os.path.join(
"requirements", "static", "ci", pydir, file_name
)
install_command = ["--progress-bar=off", "-r", requirements_file]
session.install(*install_command, silent=PIP_INSTALL_SILENT)
_install_requirements(session, "zeromq")
env = {
"ONEDIR_TESTRUN": "1",
"PKG_TEST_TYPE": chunk,
@ -1896,9 +1924,6 @@ def test_pkgs_onedir(session):
if chunk in ("upgrade-classic", "downgrade-classic"):
cmd_args.append("--classic")
# Workaround for installing and running classic packages from 3005.1
# They can only run with importlib-metadata<5.0.0.
subprocess.run(["pip3", "install", "importlib-metadata==4.13.0"], check=False)
pytest_args = (
cmd_args[:]

View file

@ -76,7 +76,7 @@ def pytest_addoption(parser):
"""
test_selection_group = parser.getgroup("Tests Runtime Selection")
test_selection_group.addoption(
"--system-service",
"--pkg-system-service",
default=False,
action="store_true",
help="Run the daemons as system services",
@ -148,7 +148,7 @@ def pytest_runtest_setup(item):
@pytest.fixture(scope="session")
def salt_factories_root_dir(request, tmp_path_factory):
root_dir = SaltPkgInstall.salt_factories_root_dir(
request.config.getoption("--system-service")
request.config.getoption("--pkg-system-service")
)
if root_dir is not None:
yield root_dir
@ -169,7 +169,7 @@ def salt_factories_config(salt_factories_root_dir):
return {
"code_dir": CODE_DIR,
"root_dir": salt_factories_root_dir,
"system_install": True,
"system_service": True,
}
@ -177,7 +177,7 @@ def salt_factories_config(salt_factories_root_dir):
def install_salt(request, salt_factories_root_dir):
with SaltPkgInstall(
conf_dir=salt_factories_root_dir / "etc" / "salt",
system_service=request.config.getoption("--system-service"),
pkg_system_service=request.config.getoption("--pkg-system-service"),
upgrade=request.config.getoption("--upgrade"),
downgrade=request.config.getoption("--downgrade"),
no_uninstall=request.config.getoption("--no-uninstall"),
@ -391,7 +391,8 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree):
master_script = False
if master_script:
salt_factories.system_install = False
salt_factories.system_service = False
salt_factories.generate_scripts = True
scripts_dir = salt_factories.root_dir / "Scripts"
scripts_dir.mkdir(exist_ok=True)
salt_factories.scripts_dir = scripts_dir
@ -401,16 +402,20 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree):
python_executable = install_salt.bin_dir / "python.exe"
if install_salt.relenv:
python_executable = install_salt.install_dir / "Scripts" / "python.exe"
salt_factories.python_executable = python_executable
factory = salt_factories.salt_master_daemon(
random_string("master-"),
defaults=config_defaults,
overrides=config_overrides,
factory_class=SaltMasterWindows,
salt_pkg_install=install_salt,
python_executable=python_executable,
)
salt_factories.system_install = True
salt_factories.system_service = True
else:
if install_salt.classic and platform.is_darwin():
os.environ["PATH"] += ":/opt/salt/bin"
factory = salt_factories.salt_master_daemon(
random_string("master-"),
defaults=config_defaults,
@ -473,11 +478,19 @@ def salt_minion(salt_factories, salt_master, install_salt):
"winrepo_dir_ng"
] = rf"{salt_factories.root_dir}\srv\salt\win\repo_ng"
config_overrides["winrepo_source_dir"] = r"salt://win/repo_ng"
if install_salt.classic and platform.is_windows():
salt_factories.python_executable = None
if install_salt.classic and platform.is_darwin():
os.environ["PATH"] += ":/opt/salt/bin"
factory = salt_master.salt_minion_daemon(
minion_id,
overrides=config_overrides,
defaults=config_defaults,
)
# Salt factories calls salt.utils.verify.verify_env
# which sets root perms on /srv/salt and /srv/pillar since we are running
# the test suite as root, but we want to run Salt master as salt

View file

@ -46,7 +46,7 @@ def test_pip_install(salt_call_cli, install_salt, shell):
"""
Test pip.install and ensure module can use installed library
"""
dep = "PyGithub"
dep = "PyGithub==1.56.0"
repo = "https://github.com/saltstack/salt.git"
try:

View file

@ -50,8 +50,8 @@ log = logging.getLogger(__name__)
@attr.s(kw_only=True, slots=True)
class SaltPkgInstall:
pkg_system_service: bool = attr.ib(default=False)
proc: Subprocess = attr.ib(init=False, repr=False)
system_service: bool = attr.ib(default=False)
# Paths
root: pathlib.Path = attr.ib(default=None)
@ -691,7 +691,7 @@ class SaltPkgInstall:
ret = self.proc.run(str(self.ssm_bin), "remove", "salt-minion", "confirm")
self._check_retcode(ret)
if self.system_service:
if self.pkg_system_service:
self._install_system_service()
elif platform.is_darwin():
@ -1246,7 +1246,7 @@ class PkgMixin:
@attr.s(kw_only=True)
class DaemonPkgMixin(PkgMixin):
def __attrs_post_init__(self):
if not platform.is_windows() and self.salt_pkg_install.system_service:
if not platform.is_windows() and self.salt_pkg_install.pkg_system_service:
if platform.is_darwin():
self.write_launchd_conf()
else:
@ -1274,7 +1274,7 @@ class SaltMaster(DaemonPkgMixin, master.SaltMaster):
DaemonPkgMixin.__attrs_post_init__(self)
def _get_impl_class(self):
if self.system_install and self.salt_pkg_install.system_service:
if self.system_service and self.salt_pkg_install.pkg_system_service:
if platform.is_windows():
return PkgSsmSaltDaemonImpl
if platform.is_darwin():
@ -1355,7 +1355,7 @@ class SaltMinion(DaemonPkgMixin, minion.SaltMinion):
DaemonPkgMixin.__attrs_post_init__(self)
def _get_impl_class(self):
if self.system_install and self.salt_pkg_install.system_service:
if self.system_service and self.salt_pkg_install.pkg_system_service:
if platform.is_windows():
return PkgSsmSaltDaemonImpl
if platform.is_darwin():
@ -1391,7 +1391,7 @@ class SaltApi(DaemonPkgMixin, api.SaltApi):
DaemonPkgMixin.__attrs_post_init__(self)
def _get_impl_class(self):
if self.system_install and self.salt_pkg_install.system_service:
if self.system_service and self.salt_pkg_install.pkg_system_service:
if platform.is_windows():
return PkgSsmSaltDaemonImpl
if platform.is_darwin():

View file

@ -2,10 +2,11 @@ mock >= 3.0.0
# PyTest
docker
pytest >= 7.2.0
pytest-salt-factories >= 1.0.0rc26
pytest-salt-factories >= 1.0.0rc27
pytest-helpers-namespace >= 2019.1.8
pytest-subtests
pytest-timeout
pytest-httpserver
pytest-custom-exit-code >= 0.3
flaky
more-itertools

View file

@ -2,6 +2,7 @@
# XXX: Temporarily do not install pylxd.
# pylxd(or likely ws4py) will cause the test suite to hang at the finish line under runtests.py
# pylxd>=2.2.5
--constraint=../pkg/py{py_version}/{platform}.txt
pygit2>=1.2.0

View file

@ -1,4 +1,5 @@
# FreeBSD static CI requirements
--constraint=../pkg/py{py_version}/{platform}.txt
yamllint

View file

@ -1,4 +1,5 @@
# Lint requirements
--constraint=./py{py_version}/{platform}.txt
pylint==2.4.4

View file

@ -1,4 +0,0 @@
cherrypy
pytest-salt-factories==1.0.0rc17
pythonnet==3.0.1
wmi==1.5.1; sys_platform == 'win32'

View file

@ -1,3 +0,0 @@
cherrypy
pytest-salt-factories==1.0.0rc17
docker

View file

@ -91,7 +91,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -239,11 +239,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/darwin.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -295,6 +296,8 @@ passlib==1.7.4
# via -r requirements/static/ci/common.in
pathspec==0.11.1
# via yamllint
pathtools==0.1.2
# via watchdog
platformdirs==3.5.3
# via virtualenv
pluggy==1.0.0
@ -360,7 +363,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -543,7 +546,7 @@ vultr==1.0.1
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/darwin.txt
watchdog==3.0.0
watchdog==0.10.3
# via -r requirements/static/ci/common.in
websocket-client==0.40.0
# via

View file

@ -87,7 +87,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -234,11 +234,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/freebsd.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -354,7 +355,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -101,7 +101,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -259,11 +259,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/linux.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -388,7 +389,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -1,168 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests-windows.txt requirements/base.txt requirements/static/ci/pkgtests-windows.in requirements/zeromq.txt
#
attrs==23.1.0
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
autocommand==2.2.2
# via jaraco.text
certifi==2023.07.22
# via requests
cffi==1.15.1
# via
# clr-loader
# cryptography
charset-normalizer==3.2.0
# via requests
cheroot==10.0.0
# via cherrypy
cherrypy==18.8.0
# via -r requirements/static/ci/pkgtests-windows.in
clr-loader==0.2.5
# via pythonnet
colorama==0.4.6
# via pytest
contextvars==2.4
# via -r requirements/base.txt
cryptography==41.0.4
# via -r requirements/crypto.txt
distlib==0.3.6
# via virtualenv
distro==1.8.0
# via
# -r requirements/base.txt
# pytest-skip-markers
exceptiongroup==1.1.1
# via pytest
filelock==3.12.4
# via virtualenv
idna==3.4
# via requests
immutables==0.15
# via contextvars
inflect==6.0.4
# via jaraco.text
iniconfig==2.0.0
# via pytest
jaraco.collections==4.2.0
# via cherrypy
jaraco.context==4.3.0
# via jaraco.text
jaraco.functools==3.7.0
# via
# cheroot
# jaraco.text
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.2
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
looseversion==1.2.0
# via -r requirements/base.txt
markupsafe==2.1.3
# via
# -r requirements/base.txt
# jinja2
more-itertools==9.1.0
# via
# cheroot
# cherrypy
# jaraco.functools
# jaraco.text
msgpack==1.0.5
# via
# -r requirements/base.txt
# pytest-salt-factories
packaging==23.1
# via
# -r requirements/base.txt
# pytest
platformdirs==3.5.3
# via virtualenv
pluggy==1.0.0
# via pytest
portend==3.1.0
# via cherrypy
psutil==5.9.5
# via
# -r requirements/base.txt
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pycparser==2.21
# via cffi
pycryptodomex==3.9.8
# via -r requirements/crypto.txt
pydantic==1.10.9
# via inflect
pytest-helpers-namespace==2021.12.29
# via
# pytest-salt-factories
# pytest-shell-utilities
pytest-salt-factories==1.0.0rc17
# via -r requirements/static/ci/pkgtests-windows.in
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
pytest-skip-markers==1.4.1
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-tempdir==2019.10.12
# via pytest-salt-factories
pytest==7.3.2
# via
# pytest-helpers-namespace
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
# pytest-tempdir
pythonnet==3.0.1
# via -r requirements/static/ci/pkgtests-windows.in
pytz==2023.3
# via tempora
pywin32==306
# via
# pytest-skip-markers
# wmi
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.0
# via
# -r requirements/zeromq.txt
# pytest-salt-factories
requests==2.31.0
# via -r requirements/base.txt
tempora==5.3.0
# via portend
tomli==2.0.1
# via pytest
tornado==6.3.2
# via -r requirements/base.txt
typing-extensions==4.6.3
# via
# pydantic
# pytest-shell-utilities
# pytest-system-statistics
urllib3==1.26.14
# via requests
virtualenv==20.23.0
# via pytest-salt-factories
wmi==1.5.1 ; sys_platform == "win32"
# via -r requirements/static/ci/pkgtests-windows.in
zc.lockfile==3.0.post1
# via cherrypy
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View file

@ -1,163 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt
#
attrs==23.1.0
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
autocommand==2.2.2
# via jaraco.text
certifi==2023.07.22
# via requests
cffi==1.15.1
# via cryptography
charset-normalizer==3.2.0
# via requests
cheroot==10.0.0
# via cherrypy
cherrypy==18.8.0
# via -r requirements/static/ci/pkgtests.in
contextvars==2.4
# via -r requirements/base.txt
cryptography==41.0.4
# via -r requirements/crypto.txt
distlib==0.3.6
# via virtualenv
distro==1.8.0
# via
# -r requirements/base.txt
# pytest-skip-markers
docker==6.1.3
# via -r requirements/static/ci/pkgtests.in
exceptiongroup==1.1.1
# via pytest
filelock==3.12.4
# via virtualenv
idna==3.4
# via requests
immutables==0.15
# via contextvars
inflect==6.0.4
# via jaraco.text
iniconfig==2.0.0
# via pytest
jaraco.collections==4.2.0
# via cherrypy
jaraco.context==4.3.0
# via jaraco.text
jaraco.functools==3.7.0
# via
# cheroot
# jaraco.text
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.2
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
looseversion==1.2.0
# via -r requirements/base.txt
markupsafe==2.1.3
# via
# -r requirements/base.txt
# jinja2
more-itertools==9.1.0
# via
# cheroot
# cherrypy
# jaraco.functools
# jaraco.text
msgpack==1.0.5
# via
# -r requirements/base.txt
# pytest-salt-factories
packaging==23.1
# via
# -r requirements/base.txt
# docker
# pytest
platformdirs==3.5.3
# via virtualenv
pluggy==1.0.0
# via pytest
portend==3.1.0
# via cherrypy
psutil==5.9.5
# via
# -r requirements/base.txt
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pycparser==2.21
# via cffi
pycryptodomex==3.9.8
# via -r requirements/crypto.txt
pydantic==1.10.9
# via inflect
pytest-helpers-namespace==2021.12.29
# via
# pytest-salt-factories
# pytest-shell-utilities
pytest-salt-factories==1.0.0rc17
# via -r requirements/static/ci/pkgtests.in
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
pytest-skip-markers==1.4.1
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-tempdir==2019.10.12
# via pytest-salt-factories
pytest==7.3.2
# via
# pytest-helpers-namespace
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
# pytest-tempdir
pytz==2023.3
# via tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.0
# via
# -r requirements/zeromq.txt
# pytest-salt-factories
requests==2.31.0
# via
# -r requirements/base.txt
# docker
tempora==5.3.0
# via portend
tomli==2.0.1
# via pytest
tornado==6.3.2
# via -r requirements/base.txt
typing-extensions==4.6.3
# via
# pydantic
# pytest-shell-utilities
# pytest-system-statistics
urllib3==1.26.14
# via
# docker
# requests
virtualenv==20.23.0
# via pytest-salt-factories
websocket-client==1.6.3
# via docker
zc.lockfile==3.0.post1
# via cherrypy
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View file

@ -218,11 +218,12 @@ markupsafe==2.1.2
# jinja2
# mako
# werkzeug
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -315,7 +316,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.11/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in
#
aiohttp==3.8.5
# via etcd3-py
@ -237,11 +237,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/darwin.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -358,7 +359,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.11/docs.txt requirements/static/ci/docs.in
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/docs.txt requirements/static/ci/docs.in
#
alabaster==0.7.13
# via sphinx

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.11/freebsd-crypto.txt requirements/static/ci/crypto.in
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd-crypto.txt requirements/static/ci/crypto.in
#
m2crypto==0.38.0
# via -r requirements/static/ci/crypto.in

View file

@ -232,11 +232,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/freebsd.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -352,7 +353,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -255,11 +255,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/linux.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -384,7 +385,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -1,164 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.11/pkgtests-windows.txt requirements/base.txt requirements/static/ci/pkgtests-windows.in requirements/zeromq.txt
#
attrs==23.1.0
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
autocommand==2.2.2
# via jaraco.text
certifi==2023.07.22
# via requests
cffi==1.15.1
# via
# clr-loader
# cryptography
charset-normalizer==3.2.0
# via requests
cheroot==10.0.0
# via cherrypy
cherrypy==18.8.0
# via -r requirements/static/ci/pkgtests-windows.in
clr-loader==0.2.5
# via pythonnet
colorama==0.4.6
# via pytest
contextvars==2.4
# via -r requirements/base.txt
cryptography==41.0.4
# via -r requirements/crypto.txt
distlib==0.3.6
# via virtualenv
distro==1.8.0
# via
# -r requirements/base.txt
# pytest-skip-markers
filelock==3.12.4
# via virtualenv
idna==3.4
# via requests
immutables==0.15
# via contextvars
inflect==6.0.2
# via jaraco.text
iniconfig==2.0.0
# via pytest
jaraco.collections==4.1.0
# via cherrypy
jaraco.context==4.3.0
# via jaraco.text
jaraco.functools==3.7.0
# via
# cheroot
# jaraco.text
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.2
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
looseversion==1.2.0
# via -r requirements/base.txt
markupsafe==2.1.2
# via
# -r requirements/base.txt
# jinja2
more-itertools==9.1.0
# via
# cheroot
# cherrypy
# jaraco.functools
# jaraco.text
msgpack==1.0.5
# via
# -r requirements/base.txt
# pytest-salt-factories
packaging==23.1
# via
# -r requirements/base.txt
# pytest
platformdirs==3.5.3
# via virtualenv
pluggy==1.0.0
# via pytest
portend==3.1.0
# via cherrypy
psutil==5.9.5
# via
# -r requirements/base.txt
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pycparser==2.21
# via cffi
pycryptodomex==3.9.8
# via -r requirements/crypto.txt
pydantic==1.10.4
# via inflect
pytest-helpers-namespace==2021.12.29
# via
# pytest-salt-factories
# pytest-shell-utilities
pytest-salt-factories==1.0.0rc17
# via -r requirements/static/ci/pkgtests-windows.in
pytest-shell-utilities==1.7.0
# via pytest-salt-factories
pytest-skip-markers==1.4.1
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-tempdir==2019.10.12
# via pytest-salt-factories
pytest==7.3.2
# via
# pytest-helpers-namespace
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
# pytest-tempdir
pythonnet==3.0.1
# via -r requirements/static/ci/pkgtests-windows.in
pytz==2023.3
# via tempora
pywin32==306
# via
# pytest-skip-markers
# wmi
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.0
# via
# -r requirements/zeromq.txt
# pytest-salt-factories
requests==2.31.0
# via -r requirements/base.txt
tempora==5.3.0
# via portend
tornado==6.3.2
# via -r requirements/base.txt
typing-extensions==4.6.3
# via
# pydantic
# pytest-shell-utilities
# pytest-system-statistics
urllib3==1.26.14
# via requests
virtualenv==20.23.0
# via pytest-salt-factories
wmi==1.5.1 ; sys_platform == "win32"
# via -r requirements/static/ci/pkgtests-windows.in
zc.lockfile==3.0.post1
# via cherrypy
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View file

@ -1,159 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.11/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt
#
attrs==23.1.0
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
autocommand==2.2.2
# via jaraco.text
certifi==2023.07.22
# via requests
cffi==1.15.1
# via cryptography
charset-normalizer==3.2.0
# via requests
cheroot==10.0.0
# via cherrypy
cherrypy==18.8.0
# via -r requirements/static/ci/pkgtests.in
contextvars==2.4
# via -r requirements/base.txt
cryptography==41.0.4
# via -r requirements/crypto.txt
distlib==0.3.6
# via virtualenv
distro==1.8.0
# via
# -r requirements/base.txt
# pytest-skip-markers
docker==6.1.3
# via -r requirements/static/ci/pkgtests.in
filelock==3.12.4
# via virtualenv
idna==3.4
# via requests
immutables==0.15
# via contextvars
inflect==6.0.2
# via jaraco.text
iniconfig==2.0.0
# via pytest
jaraco.collections==4.1.0
# via cherrypy
jaraco.context==4.3.0
# via jaraco.text
jaraco.functools==3.7.0
# via
# cheroot
# jaraco.text
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.2
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
looseversion==1.2.0
# via -r requirements/base.txt
markupsafe==2.1.2
# via
# -r requirements/base.txt
# jinja2
more-itertools==9.1.0
# via
# cheroot
# cherrypy
# jaraco.functools
# jaraco.text
msgpack==1.0.5
# via
# -r requirements/base.txt
# pytest-salt-factories
packaging==23.1
# via
# -r requirements/base.txt
# docker
# pytest
platformdirs==3.5.3
# via virtualenv
pluggy==1.0.0
# via pytest
portend==3.1.0
# via cherrypy
psutil==5.9.5
# via
# -r requirements/base.txt
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pycparser==2.21
# via cffi
pycryptodomex==3.9.8
# via -r requirements/crypto.txt
pydantic==1.10.4
# via inflect
pytest-helpers-namespace==2021.12.29
# via
# pytest-salt-factories
# pytest-shell-utilities
pytest-salt-factories==1.0.0rc17
# via -r requirements/static/ci/pkgtests.in
pytest-shell-utilities==1.7.0
# via pytest-salt-factories
pytest-skip-markers==1.4.1
# via
# pytest-salt-factories
# pytest-shell-utilities
# pytest-system-statistics
pytest-system-statistics==1.0.2
# via pytest-salt-factories
pytest-tempdir==2019.10.12
# via pytest-salt-factories
pytest==7.3.2
# via
# pytest-helpers-namespace
# pytest-salt-factories
# pytest-shell-utilities
# pytest-skip-markers
# pytest-system-statistics
# pytest-tempdir
pytz==2023.3
# via tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.0
# via
# -r requirements/zeromq.txt
# pytest-salt-factories
requests==2.31.0
# via
# -r requirements/base.txt
# docker
tempora==5.3.0
# via portend
tornado==6.3.2
# via -r requirements/base.txt
typing-extensions==4.6.3
# via
# pydantic
# pytest-shell-utilities
# pytest-system-statistics
urllib3==1.26.14
# via
# docker
# requests
virtualenv==20.23.0
# via pytest-salt-factories
websocket-client==1.6.3
# via docker
zc.lockfile==3.0.post1
# via cherrypy
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.11/windows-crypto.txt requirements/static/ci/crypto.in
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows-crypto.txt requirements/static/ci/crypto.in
#
m2crypto==0.38.0
# via -r requirements/static/ci/crypto.in

View file

@ -216,11 +216,12 @@ markupsafe==2.1.2
# jinja2
# mako
# werkzeug
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -313,7 +314,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -87,7 +87,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -238,11 +238,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/freebsd.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -358,7 +359,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -101,7 +101,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -263,11 +263,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/linux.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -392,7 +393,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -222,11 +222,12 @@ markupsafe==2.1.2
# jinja2
# mako
# werkzeug
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.8/windows.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -319,7 +320,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -91,7 +91,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -239,11 +239,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/darwin.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -295,6 +296,8 @@ passlib==1.7.4
# via -r requirements/static/ci/common.in
pathspec==0.11.1
# via yamllint
pathtools==0.1.2
# via watchdog
platformdirs==3.5.3
# via virtualenv
pluggy==1.0.0
@ -360,7 +363,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories
@ -543,7 +546,7 @@ vultr==1.0.1
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/darwin.txt
watchdog==3.0.0
watchdog==0.10.3
# via -r requirements/static/ci/common.in
websocket-client==0.40.0
# via

View file

@ -87,7 +87,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -234,11 +234,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/freebsd.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -354,7 +355,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -101,7 +101,7 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==0.3.29 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
cryptography==41.0.4
# via
@ -261,11 +261,12 @@ markupsafe==2.1.2
# werkzeug
mercurial==6.4.4
# via -r requirements/static/ci/linux.in
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -390,7 +391,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -218,11 +218,12 @@ markupsafe==2.1.2
# jinja2
# mako
# werkzeug
mock==5.0.2
mock==5.1.0
# via -r requirements/pytest.txt
more-itertools==9.1.0
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# -r requirements/pytest.txt
# cheroot
# cherrypy
# jaraco.functools
@ -315,7 +316,7 @@ pytest-helpers-namespace==2021.12.29
# pytest-shell-utilities
pytest-httpserver==1.0.8
# via -r requirements/pytest.txt
pytest-salt-factories==1.0.0rc26
pytest-salt-factories==1.0.0rc27
# via -r requirements/pytest.txt
pytest-shell-utilities==1.8.0
# via pytest-salt-factories

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/pkg/py3.11/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
autocommand==2.2.2
# via jaraco.text

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/pkg/py3.11/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
#
autocommand==2.2.2
# via jaraco.text

View file

@ -260,14 +260,12 @@ def purge(**kwargs):
if "test" in kwargs and kwargs["test"]:
ret["result"] = True
ret["comment"].append(
"Job: {} would be deleted from schedule.".format(name)
)
ret["comment"].append(f"Job: {name} would be deleted from schedule.")
else:
if kwargs.get("offline"):
del current_schedule[name]
ret["comment"].append("Deleted job: {} from schedule.".format(name))
ret["comment"].append(f"Deleted job: {name} from schedule.")
ret["changes"][name] = "removed"
else:
@ -291,7 +289,7 @@ def purge(**kwargs):
ret["result"] = True
ret["changes"][name] = "removed"
ret["comment"].append(
"Deleted job: {} from schedule.".format(name)
f"Deleted job: {name} from schedule."
)
else:
ret["comment"].append(
@ -342,7 +340,7 @@ def delete(name, **kwargs):
"""
ret = {
"comment": "Failed to delete job {} from schedule.".format(name),
"comment": f"Failed to delete job {name} from schedule.",
"result": False,
"changes": {},
}
@ -351,7 +349,7 @@ def delete(name, **kwargs):
ret["comment"] = "Job name is required."
if "test" in kwargs and kwargs["test"]:
ret["comment"] = "Job: {} would be deleted from schedule.".format(name)
ret["comment"] = f"Job: {name} would be deleted from schedule."
ret["result"] = True
else:
if kwargs.get("offline"):
@ -380,7 +378,7 @@ def delete(name, **kwargs):
)
ret["result"] = True
ret["comment"] = "Deleted Job {} from schedule.".format(name)
ret["comment"] = f"Deleted Job {name} from schedule."
ret["changes"][name] = "removed"
else:
persist = kwargs.get("persist", True)
@ -405,7 +403,7 @@ def delete(name, **kwargs):
"persist": False,
}
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
return ret
try:
@ -521,8 +519,7 @@ def build_schedule_item(name, **kwargs):
else:
schedule[name]["enabled"] = True
if "jid_include" not in kwargs or kwargs["jid_include"]:
schedule[name]["jid_include"] = True
schedule[name]["jid_include"] = kwargs.get("jid_include", True)
if "splay" in kwargs:
if isinstance(kwargs["splay"], dict):
@ -590,7 +587,7 @@ def add(name, **kwargs):
"""
ret = {
"comment": "Failed to add job {} to schedule.".format(name),
"comment": f"Failed to add job {name} to schedule.",
"result": False,
"changes": {},
}
@ -599,7 +596,7 @@ def add(name, **kwargs):
)
if name in current_schedule:
ret["comment"] = "Job {} already exists in schedule.".format(name)
ret["comment"] = f"Job {name} already exists in schedule."
ret["result"] = False
return ret
@ -635,7 +632,7 @@ def add(name, **kwargs):
schedule_data[name] = _new
if "test" in kwargs and kwargs["test"]:
ret["comment"] = "Job: {} would be added to schedule.".format(name)
ret["comment"] = f"Job: {name} would be added to schedule."
ret["result"] = True
else:
if kwargs.get("offline"):
@ -657,7 +654,7 @@ def add(name, **kwargs):
)
ret["result"] = True
ret["comment"] = "Added job: {} to schedule.".format(name)
ret["comment"] = f"Added job: {name} to schedule."
ret["changes"][name] = "added"
else:
try:
@ -734,7 +731,7 @@ def modify(name, **kwargs):
)
if name not in current_schedule:
ret["comment"] = "Job {} does not exist in schedule.".format(name)
ret["comment"] = f"Job {name} does not exist in schedule."
ret["result"] = False
return ret
@ -757,7 +754,7 @@ def modify(name, **kwargs):
return _new
if _new == _current:
ret["comment"] = "Job {} in correct state".format(name)
ret["comment"] = f"Job {name} in correct state"
return ret
ret["changes"][name] = {
@ -766,7 +763,7 @@ def modify(name, **kwargs):
}
if "test" in kwargs and kwargs["test"]:
ret["comment"] = "Job: {} would be modified in schedule.".format(name)
ret["comment"] = f"Job: {name} would be modified in schedule."
else:
if kwargs.get("offline"):
current_schedule[name].update(_new)
@ -787,7 +784,7 @@ def modify(name, **kwargs):
)
ret["result"] = True
ret["comment"] = "Modified job: {} in schedule.".format(name)
ret["comment"] = f"Modified job: {name} in schedule."
else:
persist = kwargs.get("persist", True)
@ -809,9 +806,9 @@ def modify(name, **kwargs):
out = __salt__["event.fire"](event_data, "manage_schedule")
if out:
ret["comment"] = "Modified job: {} in schedule.".format(name)
ret["comment"] = f"Modified job: {name} in schedule."
else:
ret["comment"] = "Failed to modify job {} in schedule.".format(name)
ret["comment"] = f"Failed to modify job {name} in schedule."
ret["result"] = False
return ret
@ -840,18 +837,18 @@ def run_job(name, force=False):
if name in schedule:
data = schedule[name]
if "enabled" in data and not data["enabled"] and not force:
ret["comment"] = "Job {} is disabled.".format(name)
ret["comment"] = f"Job {name} is disabled."
else:
out = __salt__["event.fire"](
{"name": name, "func": "run_job"}, "manage_schedule"
)
if out:
ret["comment"] = "Scheduling Job {} on minion.".format(name)
ret["comment"] = f"Scheduling Job {name} on minion."
else:
ret["comment"] = "Failed to run job {} on minion.".format(name)
ret["comment"] = f"Failed to run job {name} on minion."
ret["result"] = False
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
@ -874,7 +871,7 @@ def enable_job(name, **kwargs):
ret["result"] = False
if "test" in __opts__ and __opts__["test"]:
ret["comment"] = "Job: {} would be enabled in schedule.".format(name)
ret["comment"] = f"Job: {name} would be enabled in schedule."
else:
persist = kwargs.get("persist", True)
@ -888,7 +885,7 @@ def enable_job(name, **kwargs):
"persist": False,
}
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
@ -905,13 +902,11 @@ def enable_job(name, **kwargs):
# check item exists in schedule and is enabled
if name in schedule and schedule[name]["enabled"]:
ret["result"] = True
ret["comment"] = "Enabled Job {} in schedule.".format(name)
ret["comment"] = f"Enabled Job {name} in schedule."
ret["changes"][name] = "enabled"
else:
ret["result"] = False
ret[
"comment"
] = "Failed to enable job {} in schedule.".format(name)
ret["comment"] = f"Failed to enable job {name} in schedule."
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
@ -937,7 +932,7 @@ def disable_job(name, **kwargs):
ret["result"] = False
if "test" in kwargs and kwargs["test"]:
ret["comment"] = "Job: {} would be disabled in schedule.".format(name)
ret["comment"] = f"Job: {name} would be disabled in schedule."
else:
persist = kwargs.get("persist", True)
@ -951,7 +946,7 @@ def disable_job(name, **kwargs):
"persist": False,
}
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
@ -968,13 +963,13 @@ def disable_job(name, **kwargs):
# check item exists in schedule and is enabled
if name in schedule and not schedule[name]["enabled"]:
ret["result"] = True
ret["comment"] = "Disabled Job {} in schedule.".format(name)
ret["comment"] = f"Disabled Job {name} in schedule."
ret["changes"][name] = "disabled"
else:
ret["result"] = False
ret[
"comment"
] = "Failed to disable job {} in schedule.".format(name)
] = f"Failed to disable job {name} in schedule."
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
@ -1139,9 +1134,7 @@ def reload_():
try:
schedule = salt.utils.yaml.safe_load(fp_)
except salt.utils.yaml.YAMLError as exc:
ret["comment"].append(
"Unable to read existing schedule file: {}".format(exc)
)
ret["comment"].append(f"Unable to read existing schedule file: {exc}")
if schedule:
if "schedule" in schedule and schedule["schedule"]:
@ -1189,7 +1182,7 @@ def move(name, target, **kwargs):
ret["result"] = False
if "test" in kwargs and kwargs["test"]:
ret["comment"] = "Job: {} would be moved from schedule.".format(name)
ret["comment"] = f"Job: {name} would be moved from schedule."
else:
opts_schedule = list_(show_all=True, where="opts", return_yaml=False)
pillar_schedule = list_(show_all=True, where="pillar", return_yaml=False)
@ -1201,13 +1194,13 @@ def move(name, target, **kwargs):
schedule_data = pillar_schedule[name]
where = "pillar"
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
schedule_opts = []
for key, value in schedule_data.items():
temp = "{}={}".format(key, value)
temp = f"{key}={value}"
schedule_opts.append(temp)
response = __salt__["publish.publish"](target, "schedule.add", schedule_opts)
@ -1230,7 +1223,7 @@ def move(name, target, **kwargs):
else:
delete(name, where=where)
ret["result"] = True
ret["comment"] = "Moved Job {} from schedule.".format(name)
ret["comment"] = f"Moved Job {name} from schedule."
ret["minions"] = minions
return ret
return ret
@ -1254,7 +1247,7 @@ def copy(name, target, **kwargs):
ret["result"] = False
if "test" in kwargs and kwargs["test"]:
ret["comment"] = "Job: {} would be copied from schedule.".format(name)
ret["comment"] = f"Job: {name} would be copied from schedule."
else:
opts_schedule = list_(show_all=True, where="opts", return_yaml=False)
pillar_schedule = list_(show_all=True, where="pillar", return_yaml=False)
@ -1264,13 +1257,13 @@ def copy(name, target, **kwargs):
elif name in pillar_schedule:
schedule_data = pillar_schedule[name]
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
schedule_opts = []
for key, value in schedule_data.items():
temp = "{}={}".format(key, value)
temp = f"{key}={value}"
schedule_opts.append(temp)
response = __salt__["publish.publish"](target, "schedule.add", schedule_opts)
@ -1292,7 +1285,7 @@ def copy(name, target, **kwargs):
return ret
else:
ret["result"] = True
ret["comment"] = "Copied Job {} from schedule to minion(s).".format(name)
ret["comment"] = f"Copied Job {name} from schedule to minion(s)."
ret["minions"] = minions
return ret
return ret
@ -1355,7 +1348,7 @@ def postpone_job(name, current_time, new_time, **kwargs):
return ret
if "test" in __opts__ and __opts__["test"]:
ret["comment"] = "Job: {} would be postponed in schedule.".format(name)
ret["comment"] = f"Job: {name} would be postponed in schedule."
else:
if name in list_(show_all=True, where="opts", return_yaml=False):
@ -1376,7 +1369,7 @@ def postpone_job(name, current_time, new_time, **kwargs):
"func": "postpone_job",
}
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
@ -1400,7 +1393,7 @@ def postpone_job(name, current_time, new_time, **kwargs):
ret["result"] = False
ret[
"comment"
] = "Failed to postpone job {} in schedule.".format(name)
] = f"Failed to postpone job {name} in schedule."
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system
@ -1446,7 +1439,7 @@ def skip_job(name, current_time, **kwargs):
return ret
if "test" in __opts__ and __opts__["test"]:
ret["comment"] = "Job: {} would be skipped in schedule.".format(name)
ret["comment"] = f"Job: {name} would be skipped in schedule."
else:
if name in list_(show_all=True, where="opts", return_yaml=False):
@ -1465,7 +1458,7 @@ def skip_job(name, current_time, **kwargs):
"func": "skip_job",
}
else:
ret["comment"] = "Job {} does not exist.".format(name)
ret["comment"] = f"Job {name} does not exist."
ret["result"] = False
return ret
@ -1487,9 +1480,7 @@ def skip_job(name, current_time, **kwargs):
)
else:
ret["result"] = False
ret[
"comment"
] = "Failed to skip job {} in schedule.".format(name)
ret["comment"] = f"Failed to skip job {name} in schedule."
return ret
except KeyError:
# Effectively a no-op, since we can't really return without an event system

View file

@ -483,19 +483,19 @@ class Compiler:
else:
fun = 0
if "." in state:
# This should not happen usually since `pad_funcs`
# is run on rendered templates
fun += 1
for arg in body[state]:
if isinstance(arg, str):
fun += 1
if " " in arg.strip():
errors.append(
'The function "{}" in state '
'"{}" in SLS "{}" has '
f'The function "{arg}" in state '
f'"{name}" in SLS "{body["__sls__"]}" has '
"whitespace, a function with whitespace is "
"not supported, perhaps this is an argument "
'that is missing a ":"'.format(
arg, name, body["__sls__"]
)
"not supported, perhaps this is an argument"
' that is missing a ":"'
)
elif isinstance(arg, dict):
# The arg is a dict, if the arg is require or
@ -591,14 +591,22 @@ class Compiler:
if state == "require" or state == "watch":
continue
errors.append(
"No function declared in state '{}' in SLS '{}'".format(
state, body["__sls__"]
)
f"No function declared in state '{name}' in SLS "
f"'{body['__sls__']}'"
)
elif fun > 1:
funs = (
[state.split(".", maxsplit=1)[1]]
if "." in state
else []
)
funs.extend(
arg for arg in body[state] if isinstance(arg, str)
)
errors.append(
"Too many functions declared in state '{}' in "
"SLS '{}'".format(state, body["__sls__"])
f"Too many functions declared in state '{name}' in "
f"SLS '{body['__sls__']}'. Please choose one of "
"the following: " + ", ".join(funs)
)
return errors
@ -1506,17 +1514,21 @@ class State:
else:
fun = 0
if "." in state:
# This should not happen usually since `_handle_state_decls`
# is run on rendered templates
fun += 1
for arg in body[state]:
if isinstance(arg, str):
fun += 1
if " " in arg.strip():
errors.append(
'The function "{}" in state "{}" in SLS "{}" has '
"whitespace, a function with whitespace is not "
"supported, perhaps this is an argument that is "
'missing a ":"'.format(arg, name, body["__sls__"])
f'The function "{arg}" in state '
f'"{name}" in SLS "{body["__sls__"]}" has '
"whitespace, a function with whitespace is "
"not supported, perhaps this is an argument"
' that is missing a ":"'
)
elif isinstance(arg, dict):
# The arg is a dict, if the arg is require or
# watch, it must be a list.
@ -1609,14 +1621,16 @@ class State:
if state == "require" or state == "watch":
continue
errors.append(
"No function declared in state '{}' in SLS '{}'".format(
state, body["__sls__"]
)
f"No function declared in state '{name}' in SLS "
f"'{body['__sls__']}'"
)
elif fun > 1:
funs = [state.split(".", maxsplit=1)[1]] if "." in state else []
funs.extend(arg for arg in body[state] if isinstance(arg, str))
errors.append(
"Too many functions declared in state '{}' in "
"SLS '{}'".format(state, body["__sls__"])
f"Too many functions declared in state '{name}' in "
f"SLS '{body['__sls__']}'. Please choose one of "
"the following: " + ", ".join(funs)
)
return errors

Some files were not shown because too many files have changed in this diff Show more