mirror of
https://github.com/saltstack/salt.git
synced 2025-04-16 17:50:20 +00:00
Wean off define testrun
This commit is contained in:
parent
21df2a121d
commit
2043789125
11 changed files with 126 additions and 751 deletions
28
.github/workflows/ci.yml
vendored
28
.github/workflows/ci.yml
vendored
|
@ -41,9 +41,7 @@ jobs:
|
|||
runs-on: ubuntu-22.04
|
||||
environment: ci
|
||||
outputs:
|
||||
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
||||
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
||||
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
||||
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||
|
@ -189,11 +187,6 @@ jobs:
|
|||
run: |
|
||||
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
||||
|
||||
- name: Define Jobs To Run
|
||||
id: define-jobs
|
||||
run: |
|
||||
tools ci define-jobs ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Get Salt Releases
|
||||
id: get-salt-releases
|
||||
env:
|
||||
|
@ -208,23 +201,18 @@ jobs:
|
|||
run: |
|
||||
tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
|
||||
- name: Define Testrun
|
||||
id: define-testrun
|
||||
run: |
|
||||
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Define workflow config
|
||||
id: workflow-config
|
||||
run: |
|
||||
tools ci workflow-config ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Check Contents of generated testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
run: |
|
||||
cat testrun-changed-files.txt || true
|
||||
|
||||
- name: Upload testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: testrun-changed-files.txt
|
||||
|
@ -440,7 +428,7 @@ jobs:
|
|||
|
||||
build-pkgs-onedir:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -455,7 +443,7 @@ jobs:
|
|||
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||
build-ci-deps:
|
||||
name: CI Deps
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -483,7 +471,7 @@ jobs:
|
|||
nox-version: 2022.8.7
|
||||
python-version: "3.10"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
||||
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
|
||||
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}
|
||||
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
|
||||
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
|
||||
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||
|
@ -498,10 +486,10 @@ jobs:
|
|||
nox-session: ci-test-onedir
|
||||
nox-version: 2022.8.7
|
||||
python-version: "3.10"
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)["testrun"]) }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
||||
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
|
||||
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}
|
||||
workflow-slug: ci
|
||||
default-timeout: 180
|
||||
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
|
||||
|
@ -509,7 +497,7 @@ jobs:
|
|||
|
||||
combine-all-code-coverage:
|
||||
name: Combine Code Coverage
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
PIP_INDEX_URL: https://pypi.org/simple
|
||||
|
|
24
.github/workflows/nightly.yml
vendored
24
.github/workflows/nightly.yml
vendored
|
@ -90,9 +90,7 @@ jobs:
|
|||
needs:
|
||||
- workflow-requirements
|
||||
outputs:
|
||||
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
||||
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
||||
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
||||
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||
|
@ -238,11 +236,6 @@ jobs:
|
|||
run: |
|
||||
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
||||
|
||||
- name: Define Jobs To Run
|
||||
id: define-jobs
|
||||
run: |
|
||||
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Get Salt Releases
|
||||
id: get-salt-releases
|
||||
env:
|
||||
|
@ -257,23 +250,18 @@ jobs:
|
|||
run: |
|
||||
tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
|
||||
- name: Define Testrun
|
||||
id: define-testrun
|
||||
run: |
|
||||
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Define workflow config
|
||||
id: workflow-config
|
||||
run: |
|
||||
tools ci workflow-config${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Check Contents of generated testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
run: |
|
||||
cat testrun-changed-files.txt || true
|
||||
|
||||
- name: Upload testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: testrun-changed-files.txt
|
||||
|
@ -494,7 +482,7 @@ jobs:
|
|||
|
||||
build-pkgs-onedir:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -514,7 +502,7 @@ jobs:
|
|||
|
||||
build-pkgs-src:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -533,7 +521,7 @@ jobs:
|
|||
secrets: inherit
|
||||
build-ci-deps:
|
||||
name: CI Deps
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -576,7 +564,7 @@ jobs:
|
|||
nox-session: ci-test-onedir
|
||||
nox-version: 2022.8.7
|
||||
python-version: "3.10"
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)["testrun"]) }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
||||
skip-code-coverage: true
|
||||
|
|
22
.github/workflows/scheduled.yml
vendored
22
.github/workflows/scheduled.yml
vendored
|
@ -80,9 +80,7 @@ jobs:
|
|||
needs:
|
||||
- workflow-requirements
|
||||
outputs:
|
||||
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
||||
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
||||
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
||||
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||
|
@ -228,11 +226,6 @@ jobs:
|
|||
run: |
|
||||
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
||||
|
||||
- name: Define Jobs To Run
|
||||
id: define-jobs
|
||||
run: |
|
||||
tools ci define-jobs ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Get Salt Releases
|
||||
id: get-salt-releases
|
||||
env:
|
||||
|
@ -247,23 +240,18 @@ jobs:
|
|||
run: |
|
||||
tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
|
||||
- name: Define Testrun
|
||||
id: define-testrun
|
||||
run: |
|
||||
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Define workflow config
|
||||
id: workflow-config
|
||||
run: |
|
||||
tools ci workflow-config ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Check Contents of generated testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
run: |
|
||||
cat testrun-changed-files.txt || true
|
||||
|
||||
- name: Upload testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: testrun-changed-files.txt
|
||||
|
@ -479,7 +467,7 @@ jobs:
|
|||
|
||||
build-pkgs-onedir:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -494,7 +482,7 @@ jobs:
|
|||
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||
build-ci-deps:
|
||||
name: CI Deps
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -537,7 +525,7 @@ jobs:
|
|||
nox-session: ci-test-onedir
|
||||
nox-version: 2022.8.7
|
||||
python-version: "3.10"
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)["testrun"]) }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
||||
skip-code-coverage: true
|
||||
|
|
24
.github/workflows/staging.yml
vendored
24
.github/workflows/staging.yml
vendored
|
@ -71,9 +71,7 @@ jobs:
|
|||
needs:
|
||||
- check-requirements
|
||||
outputs:
|
||||
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
||||
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
||||
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
||||
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||
|
@ -228,11 +226,6 @@ jobs:
|
|||
run: |
|
||||
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
||||
|
||||
- name: Define Jobs To Run
|
||||
id: define-jobs
|
||||
run: |
|
||||
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Get Salt Releases
|
||||
id: get-salt-releases
|
||||
env:
|
||||
|
@ -247,23 +240,18 @@ jobs:
|
|||
run: |
|
||||
tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
|
||||
- name: Define Testrun
|
||||
id: define-testrun
|
||||
run: |
|
||||
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Define workflow config
|
||||
id: workflow-config
|
||||
run: |
|
||||
tools ci workflow-config${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Check Contents of generated testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
run: |
|
||||
cat testrun-changed-files.txt || true
|
||||
|
||||
- name: Upload testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: testrun-changed-files.txt
|
||||
|
@ -480,7 +468,7 @@ jobs:
|
|||
|
||||
build-pkgs-onedir:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -500,7 +488,7 @@ jobs:
|
|||
|
||||
build-pkgs-src:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -519,7 +507,7 @@ jobs:
|
|||
secrets: inherit
|
||||
build-ci-deps:
|
||||
name: CI Deps
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
@ -562,7 +550,7 @@ jobs:
|
|||
nox-session: ci-test-onedir
|
||||
nox-version: 2022.8.7
|
||||
python-version: "3.10"
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)["testrun"]) }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
||||
skip-code-coverage: true
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<%- do test_salt_linux_needs.append("build-ci-deps") %>
|
||||
name: CI Deps
|
||||
<%- if workflow_slug != 'release' %>
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
|
||||
<%- endif %>
|
||||
needs:
|
||||
- prepare-workflow
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
<{ job_name }>:
|
||||
name: Build Packages
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
|
|
2
.github/workflows/templates/ci.yml.jinja
vendored
2
.github/workflows/templates/ci.yml.jinja
vendored
|
@ -301,7 +301,7 @@
|
|||
combine-all-code-coverage:
|
||||
<%- do conclusion_needs.append("combine-all-code-coverage") %>
|
||||
name: Combine Code Coverage
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
PIP_INDEX_URL: https://pypi.org/simple
|
||||
|
|
26
.github/workflows/templates/layout.yml.jinja
vendored
26
.github/workflows/templates/layout.yml.jinja
vendored
|
@ -5,7 +5,7 @@
|
|||
<%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %>
|
||||
<%- set prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %>
|
||||
<%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %>
|
||||
<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}") %>
|
||||
<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}") %>
|
||||
<%- set gpg_key_id = "64CBBC8173D76B3F" %>
|
||||
<%- set prepare_actual_release = prepare_actual_release | default(False) %>
|
||||
<%- set gh_actions_workflows_python_version = "3.10" %>
|
||||
|
@ -89,9 +89,7 @@ jobs:
|
|||
<%- endfor %>
|
||||
<%- endif %>
|
||||
outputs:
|
||||
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
||||
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
||||
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
||||
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||
|
@ -251,13 +249,6 @@ jobs:
|
|||
run: |
|
||||
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
||||
|
||||
- name: Define Jobs To Run
|
||||
id: define-jobs
|
||||
run: |
|
||||
tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{
|
||||
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
|
||||
}> ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Get Salt Releases
|
||||
id: get-salt-releases
|
||||
env:
|
||||
|
@ -272,11 +263,6 @@ jobs:
|
|||
run: |
|
||||
tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||
|
||||
- name: Define Testrun
|
||||
id: define-testrun
|
||||
run: |
|
||||
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Define workflow config
|
||||
id: workflow-config
|
||||
run: |
|
||||
|
@ -285,12 +271,12 @@ jobs:
|
|||
}> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||
|
||||
- name: Check Contents of generated testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
run: |
|
||||
cat testrun-changed-files.txt || true
|
||||
|
||||
- name: Upload testrun-changed-files.txt
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['testrun']['type'] != 'full' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: testrun-changed-files.txt
|
||||
|
@ -299,18 +285,18 @@ jobs:
|
|||
{# We can't yet use tokenless uploads with the codecov CLI
|
||||
|
||||
- name: Install Codecov CLI
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['skip_code_coverage'] == false }}
|
||||
run: |
|
||||
python3 -m pip install codecov-cli
|
||||
|
||||
- name: Save Commit Metadata In Codecov
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['skip_code_coverage'] == false }}
|
||||
run: |
|
||||
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
|
||||
create-commit --git-service github --sha ${{ github.sha }}
|
||||
|
||||
- name: Create Codecov Coverage Report
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }}
|
||||
if: ${{ fromJSON(steps.define-testrun.outputs.config)['skip_code_coverage'] == false }}
|
||||
run: |
|
||||
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
|
||||
create-report --git-service github --sha ${{ github.sha }}
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<%- do conclusion_needs.append(job_name) %>
|
||||
name: Package Downloads
|
||||
<%- if gh_environment == "staging" %>
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] }}
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg-download'] }}
|
||||
<%- else %>
|
||||
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
|
||||
<%- endif %>
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
nox-session: ci-test-onedir
|
||||
nox-version: <{ nox_version }>
|
||||
python-version: "<{ gh_actions_workflows_python_version }>"
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)["testrun"]) }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
||||
|
|
743
tools/ci.py
743
tools/ci.py
|
@ -348,198 +348,6 @@ class TestRun(TypedDict):
|
|||
selected_tests: NotRequired[dict[str, bool]]
|
||||
|
||||
|
||||
@ci.command(
|
||||
name="define-testrun",
|
||||
arguments={
|
||||
"event_name": {
|
||||
"help": "The name of the GitHub event being processed.",
|
||||
},
|
||||
"changed_files": {
|
||||
"help": (
|
||||
"Path to '.json' file containing the payload of changed files "
|
||||
"from the 'dorny/paths-filter' GitHub action."
|
||||
),
|
||||
},
|
||||
},
|
||||
)
|
||||
def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
|
||||
"""
|
||||
Set GH Actions outputs for what and how Salt should be tested.
|
||||
"""
|
||||
github_output = os.environ.get("GITHUB_OUTPUT")
|
||||
if github_output is None:
|
||||
ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
|
||||
ctx.exit(1)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert github_output is not None
|
||||
|
||||
github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY")
|
||||
if github_step_summary is None:
|
||||
ctx.warn("The 'GITHUB_STEP_SUMMARY' variable is not set.")
|
||||
ctx.exit(1)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert github_step_summary is not None
|
||||
|
||||
labels: list[str] = []
|
||||
gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
|
||||
if gh_event_path is not None:
|
||||
try:
|
||||
gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
|
||||
except Exception as exc:
|
||||
ctx.error(
|
||||
f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc
|
||||
)
|
||||
ctx.exit(1)
|
||||
|
||||
labels.extend(
|
||||
label[0] for label in _get_pr_test_labels_from_event_payload(gh_event)
|
||||
)
|
||||
|
||||
if "test:coverage" in labels:
|
||||
ctx.info("Writing 'testrun' to the github outputs file")
|
||||
# skip running code coverage for now, was False
|
||||
testrun = TestRun(type="full", skip_code_coverage=True)
|
||||
with open(github_output, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(f"testrun={json.dumps(testrun)}\n")
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(
|
||||
"Full test run chosen because the label `test:coverage` is set.\n"
|
||||
)
|
||||
return
|
||||
elif event_name != "pull_request":
|
||||
# In this case, a full test run is in order
|
||||
ctx.info("Writing 'testrun' to the github outputs file")
|
||||
# skip running code coverage for now, was False
|
||||
testrun = TestRun(type="full", skip_code_coverage=True)
|
||||
with open(github_output, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(f"testrun={json.dumps(testrun)}\n")
|
||||
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(f"Full test run chosen due to event type of `{event_name}`.\n")
|
||||
return
|
||||
|
||||
# So, it's a pull request...
|
||||
|
||||
if not changed_files.exists():
|
||||
ctx.error(f"The '{changed_files}' file does not exist.")
|
||||
ctx.error(
|
||||
"FYI, the command 'tools process-changed-files <changed-files-path>' "
|
||||
"needs to run prior to this one."
|
||||
)
|
||||
ctx.exit(1)
|
||||
try:
|
||||
changed_files_contents = json.loads(changed_files.read_text())
|
||||
except Exception as exc:
|
||||
ctx.error(f"Could not load the changed files from '{changed_files}': {exc}")
|
||||
ctx.exit(1)
|
||||
|
||||
# Based on which files changed, or other things like PR labels we can
|
||||
# decide what to run, or even if the full test run should be running on the
|
||||
# pull request, etc...
|
||||
changed_pkg_requirements_files = json.loads(
|
||||
changed_files_contents["pkg_requirements_files"]
|
||||
)
|
||||
changed_test_requirements_files = json.loads(
|
||||
changed_files_contents["test_requirements_files"]
|
||||
)
|
||||
if changed_files_contents["golden_images"] == "true":
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(
|
||||
"Full test run chosen because there was a change made "
|
||||
"to `cicd/golden-images.json`.\n"
|
||||
)
|
||||
testrun = TestRun(type="full", skip_code_coverage=True)
|
||||
elif changed_pkg_requirements_files or changed_test_requirements_files:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(
|
||||
"Full test run chosen because there was a change made "
|
||||
"to the requirements files.\n"
|
||||
)
|
||||
wfh.write(
|
||||
"<details>\n<summary>Changed Requirements Files (click me)</summary>\n<pre>\n"
|
||||
)
|
||||
for path in sorted(
|
||||
changed_pkg_requirements_files + changed_test_requirements_files
|
||||
):
|
||||
wfh.write(f"{path}\n")
|
||||
wfh.write("</pre>\n</details>\n")
|
||||
testrun = TestRun(type="full", skip_code_coverage=True)
|
||||
elif "test:full" in labels:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("Full test run chosen because the label `test:full` is set.\n")
|
||||
testrun = TestRun(type="full", skip_code_coverage=True)
|
||||
else:
|
||||
testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
|
||||
testrun = TestRun(
|
||||
type="changed",
|
||||
skip_code_coverage=True,
|
||||
from_filenames=str(
|
||||
testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
|
||||
),
|
||||
)
|
||||
ctx.info(f"Writing {testrun_changed_files_path.name} ...")
|
||||
selected_changed_files = []
|
||||
for fpath in json.loads(changed_files_contents["testrun_files"]):
|
||||
if fpath.startswith(("tools/", "tasks/")):
|
||||
continue
|
||||
if fpath in ("noxfile.py",):
|
||||
continue
|
||||
if fpath == "tests/conftest.py":
|
||||
# In this particular case, just run the full test suite
|
||||
testrun["type"] = "full"
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(
|
||||
f"Full test run chosen because there was a change to `{fpath}`.\n"
|
||||
)
|
||||
selected_changed_files.append(fpath)
|
||||
testrun_changed_files_path.write_text("\n".join(sorted(selected_changed_files)))
|
||||
if testrun["type"] == "changed":
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("Partial test run chosen.\n")
|
||||
testrun["selected_tests"] = {
|
||||
"core": False,
|
||||
"slow": False,
|
||||
"fast": True,
|
||||
"flaky": False,
|
||||
}
|
||||
if "test:slow" in labels:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("Slow tests chosen by `test:slow` label.\n")
|
||||
testrun["selected_tests"]["slow"] = True
|
||||
if "test:core" in labels:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("Core tests chosen by `test:core` label.\n")
|
||||
testrun["selected_tests"]["core"] = True
|
||||
if "test:no-fast" in labels:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("Fast tests deselected by `test:no-fast` label.\n")
|
||||
testrun["selected_tests"]["fast"] = False
|
||||
if "test:flaky-jail" in labels:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("Flaky jailed tests chosen by `test:flaky-jail` label.\n")
|
||||
testrun["selected_tests"]["flaky"] = True
|
||||
if selected_changed_files:
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(
|
||||
"<details>\n<summary>Selected Changed Files (click me)</summary>\n<pre>\n"
|
||||
)
|
||||
for path in sorted(selected_changed_files):
|
||||
wfh.write(f"{path}\n")
|
||||
wfh.write("</pre>\n</details>\n")
|
||||
|
||||
with open(github_step_summary, "a", encoding="utf-8") as wfh:
|
||||
wfh.write("<details>\n<summary>All Changed Files (click me)</summary>\n<pre>\n")
|
||||
for path in sorted(json.loads(changed_files_contents["repo_files"])):
|
||||
wfh.write(f"{path}\n")
|
||||
wfh.write("</pre>\n</details>\n")
|
||||
|
||||
ctx.info("Writing 'testrun' to the github outputs file:\n", testrun)
|
||||
with open(github_output, "a", encoding="utf-8") as wfh:
|
||||
wfh.write(f"testrun={json.dumps(testrun)}\n")
|
||||
|
||||
|
||||
def _build_matrix(os_kind, linux_arm_runner):
|
||||
"""
|
||||
Generate matrix for build ci/cd steps.
|
||||
|
@ -557,466 +365,6 @@ def _build_matrix(os_kind, linux_arm_runner):
|
|||
return _matrix
|
||||
|
||||
|
||||
@ci.command(
    arguments={
        "distro_slug": {
            "help": "The distribution slug to generate the matrix for",
        },
        "full": {
            "help": "Full test run",
        },
        "workflow": {
            "help": "Which workflow is running",
        },
    },
)
def matrix(
    ctx: Context,
    distro_slug: str,
    full: bool = False,
    workflow: str = "ci",
):
    """
    Generate the test matrix.

    Builds a list of ``{"transport": ..., "tests-chunk": ...}`` entries for
    the given distribution slug and, when ``GITHUB_OUTPUT`` is set, writes
    the matrix plus a ``build-reports`` flag to that outputs file.
    Exits with status 1 if the GitHub event payload cannot be read.
    """
    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
    if gh_event_path is None:
        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event_path is not None

    gh_event = None
    try:
        # Use a context manager so the event file handle is closed promptly
        # instead of being leaked until garbage collection.
        with open(gh_event_path, encoding="utf-8") as rfh:
            gh_event = json.loads(rfh.read())
    except Exception as exc:
        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event is not None

    _matrix = []
    # Number of parallel splits per test chunk when running a full test run.
    _splits = {
        "functional": 4,
        "integration": 7,
        "scenarios": 1,
        "unit": 4,
    }
    for transport in ("zeromq", "tcp"):
        if transport == "tcp":
            if distro_slug not in (
                "rockylinux-9",
                "rockylinux-9-arm64",
                "photonos-5",
                "photonos-5-arm64",
                "ubuntu-22.04",
                "ubuntu-22.04-arm64",
            ):
                # Only run TCP transport tests on these distributions
                continue
        for chunk in ("unit", "functional", "integration", "scenarios"):
            if transport == "tcp" and chunk in ("unit", "functional"):
                # Only integration and scenarios shall be tested under TCP,
                # the rest would be repeating tests
                continue
            if "macos" in distro_slug and chunk == "scenarios":
                continue
            splits = _splits.get(chunk) or 1
            if full and splits > 1:
                for split in range(1, splits + 1):
                    _matrix.append(
                        {
                            "transport": transport,
                            "tests-chunk": chunk,
                            "test-group": split,
                            "test-group-count": splits,
                        }
                    )
            else:
                _matrix.append({"transport": transport, "tests-chunk": chunk})

    ctx.info("Generated matrix:")
    if not _matrix:
        ctx.print(" * `None`")
    else:
        for entry in _matrix:
            ctx.print(" * ", entry, soft_wrap=True)

    if (
        gh_event["repository"]["fork"] is True
        and "macos" in distro_slug
        and "arm64" in distro_slug
    ):
        ctx.warn("Forks don't have access to MacOS 13 Arm64. Clearing the matrix.")
        _matrix.clear()

    if not _matrix:
        build_reports = False
        ctx.info("Not building reports because the matrix is empty")
    else:
        build_reports = True

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is not None:
        with open(github_output, "a", encoding="utf-8") as wfh:
            wfh.write(f"matrix={json.dumps(_matrix)}\n")
            wfh.write(f"build-reports={json.dumps(build_reports)}\n")
    ctx.exit(0)
|
||||
|
||||
|
||||
@ci.command(
    name="pkg-matrix",
    arguments={
        "distro_slug": {
            "help": "The distribution slug to generate the matrix for",
        },
        "pkg_type": {
            "help": "The type of package we are testing against",
        },
        "testing_releases": {
            "help": "The salt releases to test upgrades against",
            "nargs": "+",
            "required": True,
        },
    },
)
def pkg_matrix(
    ctx: Context,
    distro_slug: str,
    pkg_type: str,
    testing_releases: list[tools.utils.Version] = None,
):
    """
    Generate the package test matrix.

    Produces ``install`` plus ``upgrade``/``downgrade`` (windows: upgrade
    only) entries for every requested salt release and, when
    ``GITHUB_OUTPUT`` is set, writes the matrix and a ``build-reports``
    flag to that outputs file.
    """
    gh_event = None
    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
    if gh_event_path is None:
        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event_path is not None

    try:
        # Context manager avoids leaking the event payload file handle.
        with open(gh_event_path, encoding="utf-8") as rfh:
            gh_event = json.loads(rfh.read())
    except Exception as exc:
        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event is not None

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is None:
        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")

    if TYPE_CHECKING:
        assert testing_releases

    adjusted_versions = []
    for ver in testing_releases:
        adjusted_versions.append((ver, "relenv"))
    ctx.info(f"Will look for the following versions: {adjusted_versions}")

    # Filter out the prefixes to look under
    if "macos-" in distro_slug:
        # We don't have golden images for macos, handle these separately
        prefixes = {
            "classic": "osx/",
            "tiamat": "salt/py3/macos/minor/",
            "relenv": "salt/py3/macos/minor/",
        }
        name = "macos"
        # Bind ``arch`` here as well so the arm64-runner check further down
        # cannot raise a NameError for macos slugs.
        arch = "arm64" if "arm64" in distro_slug else "x86_64"
    else:
        parts = distro_slug.split("-")
        name = parts[0]
        version = parts[1]

        if len(parts) > 2:
            arch = parts[2]
        elif name in ("debian", "ubuntu"):
            arch = "amd64"
        else:
            arch = "x86_64"

        ctx.info(f"Parsed linux slug parts {name} {version} {arch}")

        if name == "amazonlinux":
            name = "amazon"
        elif name == "rockylinux":
            name = "redhat"
        elif "photon" in name:
            name = "photon"

        if name == "windows":
            prefixes = {
                "classic": "windows/",
                "tiamat": "salt/py3/windows/minor",
                "relenv": "salt/py3/windows/minor",
            }
        else:
            prefixes = {
                "classic": f"py3/{name}/{version}/{arch}/",
                "tiamat": f"salt/py3/{name}/{version}/{arch}/minor/",
                "relenv": f"salt/py3/{name}/{version}/{arch}/minor/",
            }

    # XXX: fetch versions -- the S3 (boto3 paginator) based existence
    # filtering previously found here was commented out; reinstate it when
    # version discovery against the artifacts bucket is needed again.
    _matrix = [
        {
            "tests-chunk": "install",
            "version": None,
        }
    ]

    for version, backend in adjusted_versions:
        prefix = prefixes[backend]
        # TODO: Remove this after 3009.0
        if backend == "relenv" and version >= tools.utils.Version("3006.5"):
            # str.replace returns a new string; the original discarded it.
            prefix = prefix.replace("/arm64/", "/aarch64/")
        if name == "windows":
            sessions = [
                "upgrade",
            ]
        else:
            sessions = ["upgrade", "downgrade"]
        for session in sessions:
            _matrix.append(
                {
                    "tests-chunk": session,
                    "version": str(version),
                }
            )

    ctx.info("Generated matrix:")
    if not _matrix:
        ctx.print(" * `None`")
    else:
        for entry in _matrix:
            ctx.print(" * ", entry, soft_wrap=True)

    if (
        arch == "arm64"
        and name not in ("windows", "macos")
        # NOTE(review): the original tested ``not in ("0", "")`` which clears
        # the matrix exactly when an arm64 runner IS configured -- inverted
        # relative to both the warning text and the check in
        # ``get_ci_deps_matrix``. Clear only when no runner is configured.
        and os.environ.get("LINUX_ARM_RUNNER", "0") in ("0", "")
    ):
        ctx.warn("This fork does not have a linux arm64 runner configured.")
        _matrix.clear()

    if not _matrix:
        build_reports = False
        ctx.info("Not building reports because the matrix is empty")
    else:
        build_reports = True

    if github_output is not None:
        with open(github_output, "a", encoding="utf-8") as wfh:
            wfh.write(f"matrix={json.dumps(_matrix)}\n")
            wfh.write(f"build-reports={json.dumps(build_reports)}\n")
    ctx.exit(0)
|
||||
|
||||
|
||||
@ci.command(name="deps-matrix")
def get_ci_deps_matrix(ctx: Context):
    """
    Generate the CI dependencies matrix.

    Emits a fixed per-platform matrix (linux/macos/windows) and appends an
    ``arch: arm64`` linux entry only when the ``LINUX_ARM_RUNNER``
    environment variable is set to something other than ``"0"`` or empty.
    Writes the matrix to the ``GITHUB_OUTPUT`` file and exits 0; exits 1
    when the required environment variables or event payload are missing.
    """
    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
    if gh_event_path is None:
        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event_path is not None

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is None:
        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert github_output is not None

    gh_event = None
    try:
        # Context manager avoids leaking the event payload file handle. The
        # payload is only parsed to validate it; nothing below reads it.
        with open(gh_event_path, encoding="utf-8") as rfh:
            gh_event = json.loads(rfh.read())
    except Exception as exc:
        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event is not None

    _matrix = {
        "linux": [
            {"arch": "x86_64"},
        ],
        "macos": [
            {"distro-slug": "macos-13", "arch": "x86_64"},
            {"distro-slug": "macos-14", "arch": "arm64"},
        ],
        "windows": [
            {"distro-slug": "windows-2022", "arch": "amd64"},
        ],
    }
    if os.environ.get("LINUX_ARM_RUNNER", "0") not in ("0", ""):
        _matrix["linux"].append({"arch": "arm64"})

    ctx.info("Generated matrix:")
    ctx.print(_matrix, soft_wrap=True)

    if github_output is not None:
        with open(github_output, "a", encoding="utf-8") as wfh:
            wfh.write(f"matrix={json.dumps(_matrix)}\n")
    ctx.exit(0)
|
||||
|
||||
|
||||
@ci.command(name="pkg-downloads-matrix")
def get_pkg_downloads_matrix(ctx: Context):
    """
    Generate the package download test matrix.

    Builds per-platform (linux/macos/windows) entries from the golden
    images and ``TEST_SALT_LISTING``, skipping platforms without published
    download packages and trimming macos arm64 entries for forks. Writes
    the matrix to the ``GITHUB_OUTPUT`` file and exits 0; exits 1 when the
    required environment variables or event payload are missing.
    """
    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
    if gh_event_path is None:
        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event_path is not None

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is None:
        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert github_output is not None

    gh_event = None
    try:
        # Context manager avoids leaking the event payload file handle.
        with open(gh_event_path, encoding="utf-8") as rfh:
            gh_event = json.loads(rfh.read())
    except Exception as exc:
        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event is not None

    _matrix: dict[str, list[dict[str, str]]] = {
        "linux": [],
        "macos": [],
        "windows": [],
    }

    rpm_slugs = (
        "rockylinux",
        "amazonlinux",
        "fedora",
        "photon",
    )
    linux_skip_pkg_download_tests = (
        "opensuse-15",
        "windows",
    )
    for slug in sorted(tools.utils.get_golden_images()):
        if slug.startswith(linux_skip_pkg_download_tests):
            continue
        arch = "arm64" if "arm64" in slug else "x86_64"
        if slug.startswith(rpm_slugs) and arch == "arm64":
            # While we maintain backwards compatible urls
            _matrix["linux"].append(
                {"distro-slug": slug, "arch": "aarch64", "pkg-type": "package"}
            )
        _matrix["linux"].append(
            {"distro-slug": slug, "arch": arch, "pkg-type": "package"}
        )
        if slug.startswith("ubuntu-22"):
            _matrix["linux"].append(
                {"distro-slug": slug, "arch": arch, "pkg-type": "onedir"}
            )
    for mac in TEST_SALT_LISTING["macos"]:
        # Forks don't have access to the arm64 macos runners.
        if gh_event["repository"]["fork"] is True and mac.arch == "arm64":
            continue
        _matrix["macos"].append(
            {"distro-slug": mac.slug, "arch": mac.arch, "pkg-type": "package"}
        )

    if gh_event["repository"]["fork"] is True:
        macos_idx = 0  # macos-12
    else:
        macos_idx = 1  # macos-13
    _matrix["macos"].append(
        {
            "distro-slug": TEST_SALT_LISTING["macos"][macos_idx].slug,
            "arch": TEST_SALT_LISTING["macos"][macos_idx].arch,
            "pkg-type": "onedir",
        }
    )

    # Only the most recent windows listing gets download tests.
    for win in TEST_SALT_LISTING["windows"][-1:]:
        for pkg_type in ("nsis", "msi", "onedir"):
            _matrix["windows"].append(
                {
                    "distro-slug": win.slug,
                    "arch": win.arch,
                    "pkg-type": pkg_type,
                }
            )

    ctx.info("Generated matrix:")
    ctx.print(_matrix, soft_wrap=True)

    if github_output is not None:
        with open(github_output, "a", encoding="utf-8") as wfh:
            wfh.write(f"matrix={json.dumps(_matrix)}\n")
    ctx.exit(0)
|
||||
|
||||
|
||||
@ci.command(
|
||||
name="get-releases",
|
||||
arguments={
|
||||
|
@ -1494,6 +842,87 @@ def _os_test_filter(osdef, transport, chunk, arm_runner):
|
|||
return True
|
||||
|
||||
|
||||
def _define_testrun(ctx, changed_files, labels, full):
    """
    Decide what kind of test run the current event requires.

    Returns a ``TestRun`` of type ``full`` when a full run was requested
    (flag, requirements change, ``test:full`` label, or a change to
    ``tests/conftest.py``), otherwise a ``changed`` run driven by the
    selected changed files, with per-category test selection adjusted by
    the PR labels. Exits 1 when the changed-files JSON is missing or
    unreadable.
    """
    if not changed_files.exists():
        ctx.error(f"The '{changed_files}' file does not exist.")
        ctx.error(
            "FYI, the command 'tools process-changed-files <changed-files-path>' "
            "needs to run prior to this one."
        )
        ctx.exit(1)
    try:
        contents = json.loads(changed_files.read_text())
    except Exception as exc:
        ctx.error(f"Could not load the changed files from '{changed_files}': {exc}")
        ctx.exit(1)

    # Based on which files changed, or other things like PR labels we can
    # decide what to run, or even if the full test run should be running on the
    # pull request, etc...
    pkg_reqs_changed = json.loads(contents["pkg_requirements_files"])
    test_reqs_changed = json.loads(contents["test_requirements_files"])

    # Any of these conditions forces a full run; bail out early.
    if full:
        ctx.info("Full test run chosen")
        return TestRun(type="full", skip_code_coverage=True)
    if pkg_reqs_changed or test_reqs_changed:
        ctx.info(
            "Full test run chosen because there was a change made "
            "to the requirements files."
        )
        return TestRun(type="full", skip_code_coverage=True)
    if "test:full" in labels:
        ctx.info("Full test run chosen because the label `test:full` is set.\n")
        return TestRun(type="full", skip_code_coverage=True)

    selection_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
    testrun = TestRun(
        type="changed",
        skip_code_coverage=True,
        from_filenames=str(selection_path.relative_to(tools.utils.REPO_ROOT)),
    )
    ctx.info(f"Writing {selection_path.name} ...")
    selected = []
    for fpath in json.loads(contents["testrun_files"]):
        if fpath.startswith(("tools/", "tasks/")):
            continue
        if fpath in ("noxfile.py",):
            continue
        if fpath == "tests/conftest.py":
            # In this particular case, just run the full test suite
            testrun["type"] = "full"
            ctx.info(f"Full test run chosen because there was a change to `{fpath}`.")
        selected.append(fpath)
    selection_path.write_text("\n".join(sorted(selected)))

    if testrun["type"] == "changed":
        testrun["selected_tests"] = {
            "core": False,
            "slow": False,
            "fast": True,
            "flaky": False,
        }
        # Label-driven selection tweaks: (label, category, new value, log line).
        label_overrides = (
            ("test:slow", "slow", True, "Slow tests chosen by `test:slow` label."),
            ("test:core", "core", True, "Core tests chosen by `test:core` label."),
            (
                "test:no-fast",
                "fast",
                False,
                "Fast tests deselected by `test:no-fast` label.",
            ),
            (
                "test:flaky-jail",
                "flaky",
                True,
                "Flaky jailed tests chosen by `test:flaky-jail` label.",
            ),
        )
        for label, category, value, message in label_overrides:
            if label in labels:
                ctx.info(message)
                testrun["selected_tests"][category] = value
    return testrun
|
||||
|
||||
|
||||
@ci.command(
|
||||
name="workflow-config",
|
||||
arguments={
|
||||
|
@ -1531,7 +960,7 @@ def workflow_config(
|
|||
):
|
||||
full = False
|
||||
gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
|
||||
gh_event = None
|
||||
gh_event: dict[str, Any] = {}
|
||||
config: dict[str, Any] = {}
|
||||
|
||||
ctx.info(f"{'==== environment ====':^80s}")
|
||||
|
@ -1576,6 +1005,14 @@ def workflow_config(
|
|||
ctx.info(f"{pprint.pformat(labels)}")
|
||||
ctx.info(f"{'==== end labels ====':^80s}")
|
||||
|
||||
config["skip_code_coverage"] = True
|
||||
if "test:coverage" in labels:
|
||||
config["skip_code_coverage"] = False
|
||||
else:
|
||||
ctx.info("Skipping code coverage.")
|
||||
|
||||
config["testrun"] = _define_testrun(ctx, changed_files, labels, full)
|
||||
|
||||
ctx.info(f"{'==== github event ====':^80s}")
|
||||
ctx.info(f"{pprint.pformat(gh_event)}")
|
||||
ctx.info(f"{'==== end github event ====':^80s}")
|
||||
|
|
Loading…
Add table
Reference in a new issue