From 9f9d9b7b96f746d06ffad436b0f007d6266d6438 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 29 Apr 2024 17:00:46 +0100 Subject: [PATCH] Don't even try to run MacOS Arm jobs on forks --- .github/workflows/build-deps-ci-action.yml | 48 +- .github/workflows/build-deps-onedir.yml | 4 +- .github/workflows/build-packages.yml | 4 +- .github/workflows/build-salt-onedir.yml | 4 +- .github/workflows/nightly.yml | 2 + .github/workflows/staging.yml | 2 + .../templates/build-deps-ci-action.yml.jinja | 364 ---------- .../templates/build-macos-repo.yml.jinja | 1 + .../templates/build-onedir-repo.yml.jinja | 1 + .../test-package-downloads-action.yml.jinja | 673 ------------------ .../test-salt-pkg-repo-downloads.yml.jinja | 4 +- .github/workflows/test-action-linux.yml | 4 +- .github/workflows/test-action-macos.yml | 4 +- .github/workflows/test-action-windows.yml | 4 +- .../test-package-downloads-action.yml | 166 +---- tools/ci.py | 216 ++++++ tools/precommit/workflows.py | 320 +++------ tools/utils/__init__.py | 27 +- 18 files changed, 425 insertions(+), 1423 deletions(-) delete mode 100644 .github/workflows/templates/build-deps-ci-action.yml.jinja delete mode 100644 .github/workflows/templates/test-package-downloads-action.yml.jinja diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml index 58ef83be4f1..73c328a97f7 100644 --- a/.github/workflows/build-deps-ci-action.yml +++ b/.github/workflows/build-deps-ci-action.yml @@ -47,8 +47,36 @@ env: jobs: + generate-matrix: + name: Generate Matrix + runs-on: ubuntu-latest + outputs: + matrix-include: ${{ steps.generate-matrix.outputs.matrix }} + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + + - name: Generate Test Matrix + id: generate-matrix + run: | + tools ci deps-matrix + + linux-dependencies: name: Linux + needs: + - generate-matrix runs-on: - self-hosted - linux @@ -59,11 +87,7 @@ jobs: strategy: fail-fast: false matrix: - include: - - distro-slug: centos-7 - arch: x86_64 - - distro-slug: centos-7-arm64 - arch: arm64 + include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['linux'] }} steps: - name: "Throttle Builds" @@ -163,16 +187,14 @@ jobs: macos-dependencies: name: MacOS + needs: + - generate-matrix runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} timeout-minutes: 90 strategy: fail-fast: false matrix: - include: - - distro-slug: macos-12 - arch: x86_64 - - distro-slug: macos-13-arm64 - arch: arm64 + include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }} steps: - name: "Throttle Builds" @@ -250,6 +272,8 @@ jobs: path: nox.macos.${{ matrix.arch }}.tar.* windows-dependencies: + needs: + - generate-matrix name: Windows runs-on: - self-hosted @@ -261,9 +285,7 @@ jobs: strategy: fail-fast: false matrix: - include: - - distro-slug: windows-2022 - arch: amd64 + include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['windows'] }} steps: - name: "Throttle Builds" diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index df4d699a87c..26a5812cc94 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -92,9 +92,7 @@ jobs: fail-fast: false 
max-parallel: 2 matrix: - arch: - - x86_64 - - arm64 + arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} runs-on: - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} env: diff --git a/.github/workflows/build-packages.yml b/.github/workflows/build-packages.yml index 58b181eab1c..652bfde94fc 100644 --- a/.github/workflows/build-packages.yml +++ b/.github/workflows/build-packages.yml @@ -51,9 +51,7 @@ jobs: strategy: fail-fast: false matrix: - arch: - - x86_64 - - arm64 + arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} source: - ${{ inputs.source }} diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index ff4b7a3857f..73f9533fb51 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -99,9 +99,7 @@ jobs: fail-fast: false max-parallel: 2 matrix: - arch: - - x86_64 - - arm64 + arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} runs-on: - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e9a8dec7361..0401a7ff4b7 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2525,6 +2525,7 @@ jobs: path: artifacts/pkgs/incoming - name: Download macOS Arch64 Packages + if: ${{ ! github.event.repository.fork }} uses: actions/download-artifact@v4 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-arm64-macos @@ -2629,6 +2630,7 @@ jobs: path: artifacts/pkgs/incoming - name: Download macOS arm64 Onedir Archive + if: ${{ ! github.event.repository.fork }} uses: actions/download-artifact@v4 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-arm64.tar.xz diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 57d3ed67026..c3c3db9d8b4 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2367,6 +2367,7 @@ jobs: path: artifacts/pkgs/incoming - name: Download macOS Arch64 Packages + if: ${{ ! github.event.repository.fork }} uses: actions/download-artifact@v4 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-arm64-macos @@ -2471,6 +2472,7 @@ jobs: path: artifacts/pkgs/incoming - name: Download macOS arm64 Onedir Archive + if: ${{ ! github.event.repository.fork }} uses: actions/download-artifact@v4 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-arm64.tar.xz diff --git a/.github/workflows/templates/build-deps-ci-action.yml.jinja b/.github/workflows/templates/build-deps-ci-action.yml.jinja deleted file mode 100644 index a08f02b0d3d..00000000000 --- a/.github/workflows/templates/build-deps-ci-action.yml.jinja +++ /dev/null @@ -1,364 +0,0 @@ ---- -name: Install Test Dependencies - -on: - workflow_call: - inputs: - nox-session: - required: true - type: string - description: The nox session to run - salt-version: - type: string - required: true - description: The Salt version to set prior to running tests. 
- cache-prefix: - required: true - type: string - description: Seed used to invalidate caches - nox-version: - required: true - type: string - description: The nox version to install - nox-archive-hash: - required: true - type: string - description: Nox Tarball Cache Hash - python-version: - required: false - type: string - description: The python version to run tests with - default: "3.10" - package-name: - required: false - type: string - description: The onedir package name to use - default: salt - - -env: - COLUMNS: 190 - AWS_MAX_ATTEMPTS: "10" - AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - PIP_DISABLE_PIP_VERSION_CHECK: "1" - RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" - -jobs: - - linux-dependencies: - name: Linux - runs-on: - - self-hosted - - linux - - bastion - env: - USE_S3_CACHE: 'true' - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: - <%- for arch, build_distro_slug in build_ci_deps_listing["linux"] %> - - distro-slug: <{ build_distro_slug }> - arch: <{ arch }> - <%- endfor %> - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" - - - name: Checkout Source Code - uses: actions/checkout@v4 - - - name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} - id: nox-dependencies-cache - uses: ./.github/actions/cache - with: - path: nox.linux.${{ matrix.arch }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|linux|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} - - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz - - - name: PyPi Proxy - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt - - - name: Setup Python Tools Scripts - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci - - - name: Get Salt Project GitHub Actions Bot Environment - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") - SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) - echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - - - name: Start VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - id: spin-up-vm - run: | - tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} - - - name: List Free Space - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm ssh ${{ matrix.distro-slug }} -- 
df -h || true - - - name: Upload Checkout To VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm rsync ${{ matrix.distro-slug }} - - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }} - - - name: Cleanup .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }} - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }} - - - name: Download Compressed .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm download-dependencies ${{ matrix.distro-slug }} - - - name: Destroy VM - if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} - - - name: Upload Nox Requirements Tarball - uses: actions/upload-artifact@v4 - with: - name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }} - path: nox.linux.${{ matrix.arch }}.tar.* - - macos-dependencies: - name: MacOS - runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: - <%- for arch, build_distro_slug in build_ci_deps_listing["macos"] %> - - distro-slug: <{ build_distro_slug }> - arch: <{ arch }> - <%- endfor %> - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - name: Checkout Source Code - uses: actions/checkout@v4 - - - name: Cache nox.macos.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} - id: nox-dependencies-cache - uses: ./.github/actions/cache - with: - path: nox.macos.${{ matrix.arch }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|macos|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} - - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-macos-${{ matrix.arch }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-macos-${{ matrix.arch }}.tar.xz - - - name: Set up Python ${{ inputs.python-version }} - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - - - name: Install System Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - brew install openssl@3 - - - name: Install Nox - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - python3 -m pip install 'nox==${{ inputs.nox-version }}' - - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - env: - PRINT_TEST_SELECTION: "0" - PRINT_SYSTEM_INFO: "0" - run: | - export 
PYCURL_SSL_LIBRARY=openssl - export LDFLAGS="-L/usr/local/opt/openssl@3/lib" - export CPPFLAGS="-I/usr/local/opt/openssl@3/include" - export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig" - nox --install-only -e ${{ inputs.nox-session }} - - - name: Cleanup .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - nox --force-color -e "pre-archive-cleanup(pkg=False)" - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - nox --force-color -e compress-dependencies -- macos ${{ matrix.arch }} - - - name: Upload Nox Requirements Tarball - uses: actions/upload-artifact@v4 - with: - name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }} - path: nox.macos.${{ matrix.arch }}.tar.* - - windows-dependencies: - name: Windows - runs-on: - - self-hosted - - linux - - bastion - env: - USE_S3_CACHE: 'true' - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: - <%- for arch, build_distro_slug in build_ci_deps_listing["windows"] %> - - distro-slug: <{ build_distro_slug }> - arch: <{ arch }> - <%- endfor %> - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" - - - name: Checkout Source Code - uses: actions/checkout@v4 - - - name: Cache nox.windows.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} - id: nox-dependencies-cache - uses: ./.github/actions/cache - with: - path: nox.windows.${{ matrix.arch }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|windows|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} - - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz - - - name: PyPi Proxy - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt - - - name: Setup Python Tools Scripts - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci - - - name: Get Salt Project GitHub Actions Bot Environment - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") - SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) - echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - - - name: Start VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - id: spin-up-vm - run: | - tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} - - - name: List Free Space - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm ssh ${{ 
matrix.distro-slug }} -- df -h || true - - - name: Upload Checkout To VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm rsync ${{ matrix.distro-slug }} - - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }} - - - name: Cleanup .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }} - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }} - - - name: Download Compressed .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm download-dependencies ${{ matrix.distro-slug }} - - - name: Destroy VM - if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} - - - name: Upload Nox Requirements Tarball - uses: actions/upload-artifact@v4 - with: - name: nox-windows-${{ matrix.arch }}-${{ inputs.nox-session }} - path: nox.windows.${{ matrix.arch }}.tar.* diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja index 509a0c8b155..835e366bf52 100644 --- a/.github/workflows/templates/build-macos-repo.yml.jinja +++ b/.github/workflows/templates/build-macos-repo.yml.jinja @@ -26,6 +26,7 @@ path: artifacts/pkgs/incoming - name: Download macOS Arch64 Packages + if: ${{ ! github.event.repository.fork }} uses: actions/download-artifact@v4 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-arm64-macos diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja index 582a4cb7ae5..c6b51f07166 100644 --- a/.github/workflows/templates/build-onedir-repo.yml.jinja +++ b/.github/workflows/templates/build-onedir-repo.yml.jinja @@ -38,6 +38,7 @@ path: artifacts/pkgs/incoming - name: Download macOS arm64 Onedir Archive + if: ${{ ! 
github.event.repository.fork }} uses: actions/download-artifact@v4 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-macos-arm64.tar.xz diff --git a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja deleted file mode 100644 index 3e8dab8ee50..00000000000 --- a/.github/workflows/templates/test-package-downloads-action.yml.jinja +++ /dev/null @@ -1,673 +0,0 @@ -name: Test Download Packages - -on: - workflow_call: - inputs: - salt-version: - type: string - required: true - description: The Salt version of the packages to install and test - cache-prefix: - required: true - type: string - description: Seed used to invalidate caches - environment: - required: true - type: string - description: The environment to run tests against - latest-release: - required: true - type: string - description: The latest salt release - nox-version: - required: true - type: string - description: The nox version to install - python-version: - required: false - type: string - description: The python version to run tests with - default: "3.10" - package-name: - required: false - type: string - description: The onedir package name to use - default: salt - skip-code-coverage: - required: false - type: boolean - description: Skip code coverage - default: false - nox-session: - required: false - type: string - description: The nox session to run - default: ci-test-onedir - -env: - COLUMNS: 190 - AWS_MAX_ATTEMPTS: "10" - AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - PIP_DISABLE_PIP_VERSION_CHECK: "1" - RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" - -jobs: - - linux: - name: Linux - runs-on: - - self-hosted - - linux - - bastion - env: - USE_S3_CACHE: 'true' - environment: ${{ inputs.environment }} - timeout-minutes: 120 # 2 Hours - More than this and something is wrong - strategy: - fail-fast: false - matrix: - include: - <%- for os in test_salt_pkg_downloads_listing["linux"] %> - - distro-slug: <{ os.slug }> - arch: <{ os.arch }> - pkg-type: <{ os.pkg_type }> - <%- endfor %> - - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" - - - name: Checkout Source Code - uses: actions/checkout@v4 - - - name: Download Onedir Tarball as an Artifact - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.xz - - - name: Download nox.linux.${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }} - uses: actions/download-artifact@v4 - with: - name: nox-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}-${{ inputs.nox-session }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-linux - - - name: Get Salt Project GitHub Actions Bot Environment - run: | - TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") - 
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) - echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - - - name: Start VM - id: spin-up-vm - run: | - tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} - - - name: List Free Space - run: | - tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true - - - name: Upload Checkout To VM - run: | - tools --timestamps vm rsync ${{ matrix.distro-slug }} - - - name: Decompress .nox Directory - run: | - tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }} - - - name: Show System Info - run: | - tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \ - --nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs - - - name: Run Package Download Tests - env: - SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.arch }} - SALT_REPO_TYPE: ${{ inputs.environment }} - SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} - SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} - SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} - SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} - SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" - LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" - DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} - run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ - --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs - - - name: Combine Coverage Reports - if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' - run: | - tools --timestamps vm combine-coverage ${{ matrix.distro-slug }} - - - name: Download Test Run Artifacts - id: download-artifacts-from-vm - if: always() && steps.spin-up-vm.outcome == 'success' - run: | - tools --timestamps vm download-artifacts ${{ matrix.distro-slug }} - # Delete the salt onedir, we won't need it anymore and it will prevent - # from it showing in the tree command below - rm -rf artifacts/salt* - tree -a artifacts - - - name: Destroy VM - if: always() - run: | - tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true - - - name: Fix file ownership - run: | - sudo chown -R "$(id -un)" . 
- - - name: Install Codecov CLI - if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' - run: | - # We can't yet use tokenless uploads with the codecov CLI - # python3 -m pip install codecov-cli - # - curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import - curl -Os https://uploader.codecov.io/latest/linux/codecov - curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM - curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig - gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM - shasum -a 256 -c codecov.SHA256SUM - chmod +x codecov - - - name: Upload Source Code Coverage To Codecov - if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' - run: | - if [ ! -s artifacts/coverage/salt.xml ]; then - echo "The artifacts/coverage/salt.xml file does not exist" - exit 1 - fi - # We can't yet use tokenless uploads with the codecov CLI - #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ - # do-upload --git-service github --sha ${{ github.sha }} \ - # --file artifacts/coverage/salt.xml \ - # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ - # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs - n=0 - until [ "$n" -ge 5 ] - do - if ./codecov --file artifacts/coverage/salt.xml \ - --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ - --flags salt,${{ matrix.distro-slug }},pkg \ - --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then - rc=$? - break - fi - rc=$? - n=$((n+1)) - sleep 15 - done - if [ "$rc" -ne 0 ]; then - echo "Failed to upload codecov stats" - exit 1 - fi - - - name: Upload Tests Code Coverage To Codecov - if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' - run: | - if [ ! -s artifacts/coverage/tests.xml ]; then - echo "The artifacts/coverage/tests.xml file does not exist" - exit 1 - fi - # We can't yet use tokenless uploads with the codecov CLI - #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ - # do-upload --git-service github --sha ${{ github.sha }} \ - # --file artifacts/coverage/tests.xml \ - # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ - # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs - n=0 - until [ "$n" -ge 5 ] - do - if ./codecov --file artifacts/coverage/tests.xml \ - --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ - --flags tests,${{ matrix.distro-slug }},pkg \ - --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then - rc=$? - break - fi - rc=$? 
- n=$((n+1)) - sleep 15 - done - if [ "$rc" -ne 0 ]; then - echo "Failed to upload codecov stats" - exit 1 - fi - - - name: Upload Test Run Artifacts - if: always() && steps.download-artifacts-from-vm.outcome == 'success' - uses: actions/upload-artifact@v4 - with: - name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}-${{ matrix.pkg-type }} - path: | - artifacts - !artifacts/salt/* - !artifacts/salt-*.tar.* - - - macos: - name: MacOS - runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} - env: - USE_S3_CACHE: 'false' - environment: ${{ inputs.environment }} - timeout-minutes: 120 # 2 Hours - More than this and something is wrong - strategy: - fail-fast: false - matrix: - include: - <%- for os in test_salt_pkg_downloads_listing["macos"] %> - - distro-slug: <{ os.slug }> - arch: <{ os.arch }> - pkg-type: <{ os.pkg_type }> - <%- endfor %> - - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - name: Checkout Source Code - uses: actions/checkout@v4 - - - name: Download Onedir Tarball as an Artifact - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-macos-${{ matrix.arch }}.tar.xz - path: artifacts/ - - - name: Install System Dependencies - run: | - brew install tree - - - name: Decompress Onedir Tarball - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-macos-${{ matrix.arch }}.tar.xz - - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - update-environment: true - - - name: Install Nox - run: | - python3 -m pip install 'nox==${{ inputs.nox-version }}' - - - name: Download nox.macos.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }} - uses: actions/download-artifact@v4 - with: - name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }} - - - name: Decompress .nox Directory - run: | - nox --force-color -e decompress-dependencies -- macos ${{ matrix.arch }} - - - name: Show System Info - env: - SKIP_REQUIREMENTS_INSTALL: "1" - PRINT_SYSTEM_INFO_ONLY: "1" - run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs - - - name: Run Package Download Tests - env: - SKIP_REQUIREMENTS_INSTALL: "1" - PRINT_TEST_SELECTION: "0" - PRINT_TEST_PLAN_ONLY: "0" - PRINT_SYSTEM_INFO: "0" - RERUN_FAILURES: "1" - GITHUB_ACTIONS_PIPELINE: "1" - SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" - SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" - COVERAGE_CONTEXT: ${{ matrix.distro-slug }} - SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.arch }} - LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" - SALT_REPO_TYPE: ${{ inputs.environment }} - SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} - SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} - SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} - SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} - DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} - run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs - - - name: Fix file ownership - run: | - sudo chown -R "$(id -un)" . 
- - - name: Combine Coverage Reports - if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' - run: | - nox --force-color -e combine-coverage - - - name: Prepare Test Run Artifacts - id: download-artifacts-from-vm - if: always() && job.status != 'cancelled' - run: | - # Delete the salt onedir, we won't need it anymore and it will prevent - # from it showing in the tree command below - rm -rf artifacts/salt* - tree -a artifacts - - - name: Install Codecov CLI - if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' - run: | - # We can't yet use tokenless uploads with the codecov CLI - # python3 -m pip install codecov-cli - # - curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import - curl -Os https://uploader.codecov.io/latest/macos/codecov - curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM - curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig - gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM - shasum -a 256 -c codecov.SHA256SUM - chmod +x codecov - - - name: Upload Source Code Coverage To Codecov - if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' - run: | - if [ ! -s artifacts/coverage/salt.xml ]; then - echo "The artifacts/coverage/salt.xml file does not exist" - exit 1 - fi - # We can't yet use tokenless uploads with the codecov CLI - #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ - # do-upload --git-service github --sha ${{ github.sha }} \ - # --file artifacts/coverage/salt.xml \ - # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ - # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs - n=0 - until [ "$n" -ge 5 ] - do - if ./codecov --file artifacts/coverage/salt.xml \ - --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ - --flags salt,${{ matrix.distro-slug }},pkg \ - --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then - rc=$? - break - fi - rc=$? - n=$((n+1)) - sleep 15 - done - if [ "$rc" -ne 0 ]; then - echo "Failed to upload codecov stats" - exit 1 - fi - - - name: Upload Tests Code Coverage To Codecov - if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' - run: | - if [ ! -s artifacts/coverage/tests.xml ]; then - echo "The artifacts/coverage/tests.xml file does not exist" - exit 1 - fi - # We can't yet use tokenless uploads with the codecov CLI - #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ - # do-upload --git-service github --sha ${{ github.sha }} \ - # --file artifacts/coverage/tests.xml \ - # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ - # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs - n=0 - until [ "$n" -ge 5 ] - do - if ./codecov --file artifacts/coverage/tests.xml \ - --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ - --flags tests,${{ matrix.distro-slug }},pkg \ - --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then - rc=$? - break - fi - rc=$? 
- n=$((n+1)) - sleep 15 - done - if [ "$rc" -ne 0 ]; then - echo "Failed to upload codecov stats" - exit 1 - fi - - - name: Upload Test Run Artifacts - if: always() - uses: actions/upload-artifact@v4 - with: - name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}-${{ matrix.pkg-type }} - path: | - artifacts - !artifacts/salt/* - !artifacts/salt-*.tar.* - - - windows: - name: Windows - env: - USE_S3_CACHE: 'true' - runs-on: - - self-hosted - - linux - - bastion - environment: ${{ inputs.environment }} - timeout-minutes: 120 # 2 Hours - More than this and something is wrong - strategy: - fail-fast: false - matrix: - include: - <%- for os in test_salt_pkg_downloads_listing["windows"] %> - - distro-slug: <{ os.slug }> - arch: <{ os.arch }> - pkg-type: <{ os.pkg_type }> - <%- endfor %> - - steps: - - name: Checkout Source Code - uses: actions/checkout@v4 - - - name: Download Onedir Tarball as an Artifact - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz - - - name: Download nox.windows.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }} - uses: actions/download-artifact@v4 - with: - name: nox-windows-${{ matrix.arch }}-${{ inputs.nox-session }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-windows - - - name: Get Salt Project GitHub Actions Bot Environment - run: | - TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") - SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) - echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - - - name: Start VM - id: spin-up-vm - run: | - tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} - - - name: List Free Space - run: | - tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true - - - name: Upload Checkout To VM - run: | - tools --timestamps vm rsync ${{ matrix.distro-slug }} - - - name: Decompress .nox Directory - run: | - tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }} - - - name: Show System Info - run: | - tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \ - --nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs - - - name: Run Package Download Tests - env: - SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.arch }} - LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" - SALT_REPO_TYPE: ${{ inputs.environment }} - SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} - SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} - SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} - SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} - SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" - DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} - run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test 
--skip-requirements-install \ - -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ - --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs - - - name: Combine Coverage Reports - if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' - run: | - tools --timestamps vm combine-coverage ${{ matrix.distro-slug }} - - - name: Download Test Run Artifacts - id: download-artifacts-from-vm - if: always() && steps.spin-up-vm.outcome == 'success' - run: | - tools --timestamps vm download-artifacts ${{ matrix.distro-slug }} - # Delete the salt onedir, we won't need it anymore and it will prevent - # from it showing in the tree command below - rm -rf artifacts/salt* - tree -a artifacts - - - name: Destroy VM - if: always() - run: | - tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true - - - name: Fix file ownership - run: | - sudo chown -R "$(id -un)" . - - - name: Install Codecov CLI - if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' - run: | - # We can't yet use tokenless uploads with the codecov CLI - # python3 -m pip install codecov-cli - # - curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import - curl -Os https://uploader.codecov.io/latest/linux/codecov - curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM - curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig - gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM - shasum -a 256 -c codecov.SHA256SUM - chmod +x codecov - - - name: Upload Source Code Coverage To Codecov - if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' - run: | - if [ ! -s artifacts/coverage/salt.xml ]; then - echo "The artifacts/coverage/salt.xml file does not exist" - exit 1 - fi - # We can't yet use tokenless uploads with the codecov CLI - #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ - # do-upload --git-service github --sha ${{ github.sha }} \ - # --file artifacts/coverage/salt.xml \ - # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ - # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs - n=0 - until [ "$n" -ge 5 ] - do - if ./codecov --file artifacts/coverage/salt.xml \ - --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ - --flags salt,${{ matrix.distro-slug }},pkg \ - --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then - rc=$? - break - fi - rc=$? - n=$((n+1)) - sleep 15 - done - if [ "$rc" -ne 0 ]; then - echo "Failed to upload codecov stats" - exit 1 - fi - - - name: Upload Tests Code Coverage To Codecov - if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' - run: | - if [ ! 
-s artifacts/coverage/tests.xml ]; then - echo "The artifacts/coverage/tests.xml file does not exist" - exit 1 - fi - # We can't yet use tokenless uploads with the codecov CLI - #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ - # do-upload --git-service github --sha ${{ github.sha }} \ - # --file artifacts/coverage/tests.xml \ - # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ - # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs - n=0 - until [ "$n" -ge 5 ] - do - if ./codecov --file artifacts/coverage/tests.xml \ - --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ - --flags tests,${{ matrix.distro-slug }},pkg \ - --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then - rc=$? - break - fi - rc=$? - n=$((n+1)) - sleep 15 - done - if [ "$rc" -ne 0 ]; then - echo "Failed to upload codecov stats" - exit 1 - fi - - - name: Upload Test Run Artifacts - if: always() && steps.download-artifacts-from-vm.outcome == 'success' - uses: actions/upload-artifact@v4 - with: - name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }}-${{ matrix.pkg-type }} - path: | - artifacts - !artifacts/salt/* - !artifacts/salt-*.tar.* diff --git a/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja index d28614620db..d547bd504db 100644 --- a/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja @@ -13,9 +13,7 @@ needs: - prepare-workflow - publish-repositories - <%- for slug in test_salt_pkg_downloads_needs_slugs %> - - <{ slug }> - <%- endfor %> + - build-ci-deps <%- if gh_environment == "release" %> - download-onedir-artifact <%- else %> diff --git a/.github/workflows/test-action-linux.yml b/.github/workflows/test-action-linux.yml index 101c2940196..b55ec755f5c 100644 --- a/.github/workflows/test-action-linux.yml +++ b/.github/workflows/test-action-linux.yml @@ -83,6 +83,7 @@ jobs: runs-on: ubuntu-latest outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} + build-reports: ${{ steps.generate-matrix.outputs.build-reports }} steps: - name: "Throttle Builds" @@ -299,10 +300,11 @@ jobs: report: name: Test Reports - if: always() && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' runs-on: ubuntu-latest needs: - test + - generate-matrix steps: - name: Checkout Source Code diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 3a3f1573b31..1da200b6ae7 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -80,6 +80,7 @@ jobs: runs-on: ubuntu-latest outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} + build-reports: ${{ steps.generate-matrix.outputs.build-reports }} steps: - name: "Throttle Builds" @@ -327,10 +328,11 @@ jobs: report: name: Test Reports - if: always() && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + if: always() && 
fromJSON(needs.generate-matrix.outputs.build-reports) && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' runs-on: ubuntu-latest needs: - test + - generate-matrix steps: - name: Checkout Source Code diff --git a/.github/workflows/test-action-windows.yml b/.github/workflows/test-action-windows.yml index 39fc2215c73..65c407f69f9 100644 --- a/.github/workflows/test-action-windows.yml +++ b/.github/workflows/test-action-windows.yml @@ -83,6 +83,7 @@ jobs: runs-on: ubuntu-latest outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} + build-reports: ${{ steps.generate-matrix.outputs.build-reports }} steps: - name: "Throttle Builds" @@ -300,10 +301,11 @@ jobs: report: name: Test Reports - if: always() && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' runs-on: ubuntu-latest needs: - test + - generate-matrix steps: - name: Checkout Source Code diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index eaed094fa0b..7532813999a 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -55,8 +55,35 @@ env: jobs: + generate-matrix: + name: Generate Matrix + runs-on: ubuntu-latest + outputs: + matrix-include: ${{ steps.generate-matrix.outputs.matrix }} + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + + - name: Generate Test Matrix + id: generate-matrix + run: | + tools ci pkg-downloads-matrix + linux: name: Linux + needs: + - generate-matrix runs-on: - self-hosted - linux @@ -68,115 +95,7 @@ jobs: strategy: fail-fast: false matrix: - include: - - distro-slug: amazonlinux-2 - arch: x86_64 - pkg-type: package - - distro-slug: amazonlinux-2-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: amazonlinux-2-arm64 - arch: arm64 - pkg-type: package - - distro-slug: amazonlinux-2023 - arch: x86_64 - pkg-type: package - - distro-slug: amazonlinux-2023-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: amazonlinux-2023-arm64 - arch: arm64 - pkg-type: package - - distro-slug: centos-7 - arch: x86_64 - pkg-type: package - - distro-slug: centos-7-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: centos-7-arm64 - arch: arm64 - pkg-type: package - - distro-slug: debian-11 - arch: x86_64 - pkg-type: package - - distro-slug: debian-11-arm64 - arch: arm64 - pkg-type: package - - distro-slug: debian-12 - arch: x86_64 - pkg-type: package - - distro-slug: debian-12-arm64 - arch: arm64 - pkg-type: package - - distro-slug: fedora-39 - arch: x86_64 - pkg-type: package - - distro-slug: fedora-39-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: fedora-39-arm64 - arch: arm64 - pkg-type: package - - distro-slug: photonos-4 - arch: x86_64 - pkg-type: package - - distro-slug: photonos-4-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: photonos-4-arm64 - arch: arm64 - pkg-type: package - - distro-slug: photonos-5 - arch: x86_64 - pkg-type: package - - distro-slug: 
photonos-5-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: photonos-5-arm64 - arch: arm64 - pkg-type: package - - distro-slug: rockylinux-8 - arch: x86_64 - pkg-type: package - - distro-slug: rockylinux-8-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: rockylinux-8-arm64 - arch: arm64 - pkg-type: package - - distro-slug: rockylinux-9 - arch: x86_64 - pkg-type: package - - distro-slug: rockylinux-9-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: rockylinux-9-arm64 - arch: arm64 - pkg-type: package - - distro-slug: ubuntu-20.04 - arch: x86_64 - pkg-type: package - - distro-slug: ubuntu-20.04-arm64 - arch: arm64 - pkg-type: package - - distro-slug: ubuntu-22.04 - arch: x86_64 - pkg-type: package - - distro-slug: ubuntu-22.04 - arch: x86_64 - pkg-type: onedir - - distro-slug: ubuntu-22.04-arm64 - arch: arm64 - pkg-type: package - - distro-slug: ubuntu-22.04-arm64 - arch: arm64 - pkg-type: onedir - - distro-slug: ubuntu-23.04 - arch: x86_64 - pkg-type: package - - distro-slug: ubuntu-23.04-arm64 - arch: arm64 - pkg-type: package + include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['linux'] }} steps: @@ -372,6 +291,8 @@ jobs: macos: name: MacOS + needs: + - generate-matrix runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} env: USE_S3_CACHE: 'false' @@ -380,19 +301,7 @@ jobs: strategy: fail-fast: false matrix: - include: - - distro-slug: macos-12 - arch: x86_64 - pkg-type: package - - distro-slug: macos-13 - arch: x86_64 - pkg-type: package - - distro-slug: macos-13-arm64 - arch: arm64 - pkg-type: package - - distro-slug: macos-13-arm64 - arch: arm64 - pkg-type: onedir + include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }} steps: @@ -579,6 +488,8 @@ jobs: windows: name: Windows + needs: + - generate-matrix env: USE_S3_CACHE: 'true' runs-on: @@ -590,16 +501,7 @@ jobs: strategy: fail-fast: false matrix: - include: - - distro-slug: windows-2022 - arch: amd64 - pkg-type: nsis - - distro-slug: windows-2022 - arch: amd64 - pkg-type: msi - - distro-slug: windows-2022 - arch: amd64 - pkg-type: onedir + include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['windows'] }} steps: - name: Checkout Source Code diff --git a/tools/ci.py b/tools/ci.py index 87b5c94ab40..e89d458ac40 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -19,6 +19,7 @@ from ptscripts import Context, command_group import tools.utils import tools.utils.gh +from tools.precommit.workflows import TEST_SALT_LISTING if sys.version_info < (3, 11): from typing_extensions import NotRequired, TypedDict @@ -657,6 +658,24 @@ def matrix( """ Generate the test matrix. """ + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is None: + ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert gh_event_path is not None + + gh_event = None + try: + gh_event = json.loads(open(gh_event_path, encoding="utf-8").read()) + except Exception as exc: + ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) + ctx.exit(1) + + if TYPE_CHECKING: + assert gh_event is not None + _matrix = [] _splits = { "functional": 4, @@ -710,10 +729,19 @@ def matrix( ctx.info("Generated matrix:") ctx.print(_matrix, soft_wrap=True) + if ( + gh_event["repository"]["fork"] is True + and "macos" in distro_slug + and "arm64" in distro_slug + ): + ctx.warn("Forks don't have access to MacOS 13 Arm64. 
Clearing the matrix.")
+        _matrix.clear()
+
     github_output = os.environ.get("GITHUB_OUTPUT")
     if github_output is not None:
         with open(github_output, "a", encoding="utf-8") as wfh:
             wfh.write(f"matrix={json.dumps(_matrix)}\n")
+            wfh.write(f"build-reports={json.dumps(len(_matrix) > 0)}\n")
     ctx.exit(0)
 
 
@@ -746,9 +774,28 @@ def pkg_matrix(
     """
     Generate the test matrix.
     """
+    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
+    if gh_event_path is None:
+        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert gh_event_path is not None
+
+    gh_event = None
+    try:
+        gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
+    except Exception as exc:
+        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert gh_event is not None
+
     github_output = os.environ.get("GITHUB_OUTPUT")
     if github_output is None:
         ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
+
     if TYPE_CHECKING:
         assert testing_releases
 
@@ -873,6 +920,175 @@ def pkg_matrix(
     ctx.info("Generated matrix:")
     ctx.print(_matrix, soft_wrap=True)
 
+    if (
+        gh_event["repository"]["fork"] is True
+        and "macos" in distro_slug
+        and "arm64" in distro_slug
+    ):
+        ctx.warn("Forks don't have access to MacOS 13 Arm64. Clearing the matrix.")
+        _matrix.clear()
+
+    if github_output is not None:
+        with open(github_output, "a", encoding="utf-8") as wfh:
+            wfh.write(f"matrix={json.dumps(_matrix)}\n")
+            wfh.write(f"build-reports={json.dumps(len(_matrix) > 0)}\n")
+    ctx.exit(0)
+
+
+@ci.command(name="deps-matrix")
+def get_ci_deps_matrix(ctx: Context):
+    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
+    if gh_event_path is None:
+        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert gh_event_path is not None
+
+    github_output = os.environ.get("GITHUB_OUTPUT")
+    if github_output is None:
+        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert github_output is not None
+
+    gh_event = None
+    try:
+        gh_event = json.loads(open(gh_event_path, encoding="utf-8").read())
+    except Exception as exc:
+        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert gh_event is not None
+
+    _matrix = {
+        "linux": [
+            {"distro-slug": "centos-7", "arch": "x86_64"},
+            {"distro-slug": "centos-7-arm64", "arch": "arm64"},
+        ],
+        "macos": [
+            {"distro-slug": "macos-12", "arch": "x86_64"},
+        ],
+        "windows": [
+            {"distro-slug": "windows-2022", "arch": "amd64"},
+        ],
+    }
+    if gh_event["repository"]["fork"] is not True:
+        _matrix["macos"].append(
+            {
+                "distro-slug": "macos-13-arm64",
+                "arch": "arm64",
+            }
+        )
+
+    ctx.info("Generated matrix:")
+    ctx.print(_matrix, soft_wrap=True)
+
+    if github_output is not None:
+        with open(github_output, "a", encoding="utf-8") as wfh:
+            wfh.write(f"matrix={json.dumps(_matrix)}\n")
+    ctx.exit(0)
+
+
+@ci.command(name="pkg-downloads-matrix")
+def get_pkg_downloads_matrix(ctx: Context):
+    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
+    if gh_event_path is None:
+        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert gh_event_path is not None
+
+    github_output = os.environ.get("GITHUB_OUTPUT")
+    if github_output is None:
+        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
+        ctx.exit(1)
+
+    if TYPE_CHECKING:
+        assert github_output is not None
+
+    gh_event = None
+    try:
+        gh_event = 
json.loads(open(gh_event_path, encoding="utf-8").read()) + except Exception as exc: + ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) + ctx.exit(1) + + if TYPE_CHECKING: + assert gh_event is not None + + _matrix: dict[str, list[dict[str, str]]] = { + "linux": [], + "macos": [], + "windows": [], + } + + rpm_slugs = ( + "rockylinux", + "amazonlinux", + "centos", + "fedora", + "photon", + ) + linux_skip_pkg_download_tests = ( + "archlinux-lts", + "opensuse-15", + "windows", + ) + for slug in sorted(tools.utils.get_golden_images()): + if slug.startswith(linux_skip_pkg_download_tests): + continue + if "arm64" in slug: + arch = "arm64" + else: + arch = "x86_64" + if slug.startswith(rpm_slugs) and arch == "arm64": + # While we maintain backwards compatible urls + _matrix["linux"].append( + {"distro-slug": slug, "arch": "aarch64", "pkg-type": "package"} + ) + _matrix["linux"].append( + {"distro-slug": slug, "arch": arch, "pkg-type": "package"} + ) + if slug.startswith("ubuntu-22"): + _matrix["linux"].append( + {"distro-slug": slug, "arch": arch, "pkg-type": "onedir"} + ) + for mac in TEST_SALT_LISTING["macos"]: + if gh_event["repository"]["fork"] is True and mac.arch == "arm64": + continue + _matrix["macos"].append( + {"distro-slug": mac.slug, "arch": mac.arch, "pkg-type": "package"} + ) + + if gh_event["repository"]["fork"] is True: + macos_idx = 0 # macos-12 + else: + macos_idx = 1 # macos-13 + _matrix["macos"].append( + { + "distro-slug": TEST_SALT_LISTING["macos"][macos_idx].slug, + "arch": TEST_SALT_LISTING["macos"][macos_idx].arch, + "pkg-type": "onedir", + } + ) + + for win in TEST_SALT_LISTING["windows"][-1:]: + for pkg_type in ("nsis", "msi", "onedir"): + _matrix["windows"].append( + { + "distro-slug": win.slug, + "arch": win.arch, + "pkg-type": pkg_type, + } + ) + + ctx.info("Generated matrix:") + ctx.print(_matrix, soft_wrap=True) + if github_output is not None: with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"matrix={json.dumps(_matrix)}\n") diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index 86585f560e5..61e39995d21 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -7,31 +7,112 @@ from __future__ import annotations import logging import shutil -import sys from typing import TYPE_CHECKING, cast from jinja2 import Environment, FileSystemLoader, StrictUndefined from ptscripts import Context, command_group import tools.utils -from tools.utils import Linux, MacOS, Windows - -if sys.version_info < (3, 11): - from typing_extensions import TypedDict -else: - from typing import TypedDict # pylint: disable=no-name-in-module +from tools.utils import Linux, MacOS, PlatformDefinitions, Windows log = logging.getLogger(__name__) WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" TEMPLATES = WORKFLOWS / "templates" - -class PlatformDefinitions(TypedDict): - linux: list[Linux] - macos: list[MacOS] - windows: list[Windows] - +TEST_SALT_LISTING = PlatformDefinitions( + { + "linux": [ + Linux(slug="rockylinux-8", display_name="Rocky Linux 8", arch="x86_64"), + Linux( + slug="rockylinux-8-arm64", + display_name="Rocky Linux 8 Arm64", + arch="arm64", + ), + Linux(slug="rockylinux-9", display_name="Rocky Linux 9", arch="x86_64"), + Linux( + slug="rockylinux-9-arm64", + display_name="Rocky Linux 9 Arm64", + arch="arm64", + ), + Linux(slug="amazonlinux-2", display_name="Amazon Linux 2", arch="x86_64"), + Linux( + slug="amazonlinux-2-arm64", + display_name="Amazon Linux 2 Arm64", + 
arch="arm64", + ), + Linux( + slug="amazonlinux-2023", + display_name="Amazon Linux 2023", + arch="x86_64", + ), + Linux( + slug="amazonlinux-2023-arm64", + display_name="Amazon Linux 2023 Arm64", + arch="arm64", + ), + Linux(slug="archlinux-lts", display_name="Arch Linux LTS", arch="x86_64"), + Linux(slug="centos-7", display_name="CentOS 7", arch="x86_64"), + Linux(slug="debian-11", display_name="Debian 11", arch="x86_64"), + Linux(slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64"), + Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"), + Linux(slug="debian-12-arm64", display_name="Debian 12 Arm64", arch="arm64"), + Linux(slug="fedora-39", display_name="Fedora 39", arch="x86_64"), + Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"), + Linux( + slug="photonos-4", + display_name="Photon OS 4", + arch="x86_64", + fips=True, + ), + Linux( + slug="photonos-4-arm64", + display_name="Photon OS 4 Arm64", + arch="arm64", + fips=True, + ), + Linux( + slug="photonos-5", + display_name="Photon OS 5", + arch="x86_64", + fips=True, + ), + Linux( + slug="photonos-5-arm64", + display_name="Photon OS 5 Arm64", + arch="arm64", + fips=True, + ), + Linux(slug="ubuntu-20.04", display_name="Ubuntu 20.04", arch="x86_64"), + Linux( + slug="ubuntu-20.04-arm64", + display_name="Ubuntu 20.04 Arm64", + arch="arm64", + ), + Linux(slug="ubuntu-22.04", display_name="Ubuntu 22.04", arch="x86_64"), + Linux( + slug="ubuntu-22.04-arm64", + display_name="Ubuntu 22.04 Arm64", + arch="arm64", + ), + ], + "macos": [ + MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"), + MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"), + MacOS( + slug="macos-13-arm64", + display_name="macOS 13 Arm64", + arch="arm64", + runner="macos-13-xlarge", + ), + ], + "windows": [ + Windows(slug="windows-2016", display_name="Windows 2016", arch="amd64"), + Windows(slug="windows-2019", display_name="Windows 2019", arch="amd64"), + Windows(slug="windows-2022", display_name="Windows 2022", arch="amd64"), + ], + } +) # Define the command group cgroup = command_group( @@ -97,114 +178,7 @@ def generate_workflows(ctx: Context): "test-pkg-downloads": True, }, }, - "Test Package Downloads": { - "template": "test-package-downloads-action.yml", - }, - "Build CI Deps": { - "template": "build-deps-ci-action.yml", - }, } - test_salt_listing = PlatformDefinitions( - { - "linux": [ - Linux(slug="rockylinux-8", display_name="Rocky Linux 8", arch="x86_64"), - Linux( - slug="rockylinux-8-arm64", - display_name="Rocky Linux 8 Arm64", - arch="arm64", - ), - Linux(slug="rockylinux-9", display_name="Rocky Linux 9", arch="x86_64"), - Linux( - slug="rockylinux-9-arm64", - display_name="Rocky Linux 9 Arm64", - arch="arm64", - ), - Linux( - slug="amazonlinux-2", display_name="Amazon Linux 2", arch="x86_64" - ), - Linux( - slug="amazonlinux-2-arm64", - display_name="Amazon Linux 2 Arm64", - arch="arm64", - ), - Linux( - slug="amazonlinux-2023", - display_name="Amazon Linux 2023", - arch="x86_64", - ), - Linux( - slug="amazonlinux-2023-arm64", - display_name="Amazon Linux 2023 Arm64", - arch="arm64", - ), - Linux( - slug="archlinux-lts", display_name="Arch Linux LTS", arch="x86_64" - ), - Linux(slug="centos-7", display_name="CentOS 7", arch="x86_64"), - Linux(slug="debian-11", display_name="Debian 11", arch="x86_64"), - Linux( - slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64" - ), - Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"), - Linux( - slug="debian-12-arm64", 
display_name="Debian 12 Arm64", arch="arm64" - ), - Linux(slug="fedora-39", display_name="Fedora 39", arch="x86_64"), - Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"), - Linux( - slug="photonos-4", - display_name="Photon OS 4", - arch="x86_64", - fips=True, - ), - Linux( - slug="photonos-4-arm64", - display_name="Photon OS 4 Arm64", - arch="arm64", - fips=True, - ), - Linux( - slug="photonos-5", - display_name="Photon OS 5", - arch="x86_64", - fips=True, - ), - Linux( - slug="photonos-5-arm64", - display_name="Photon OS 5 Arm64", - arch="arm64", - fips=True, - ), - Linux(slug="ubuntu-20.04", display_name="Ubuntu 20.04", arch="x86_64"), - Linux( - slug="ubuntu-20.04-arm64", - display_name="Ubuntu 20.04 Arm64", - arch="arm64", - ), - Linux(slug="ubuntu-22.04", display_name="Ubuntu 22.04", arch="x86_64"), - Linux( - slug="ubuntu-22.04-arm64", - display_name="Ubuntu 22.04 Arm64", - arch="arm64", - ), - ], - "macos": [ - MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"), - MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"), - MacOS( - slug="macos-13-arm64", - display_name="macOS 13 Arm64", - arch="arm64", - runner="macos-13-xlarge", - ), - ], - "windows": [ - Windows(slug="windows-2016", display_name="Windows 2016", arch="amd64"), - Windows(slug="windows-2019", display_name="Windows 2019", arch="amd64"), - Windows(slug="windows-2022", display_name="Windows 2022", arch="amd64"), - ], - } - ) test_salt_pkg_listing = PlatformDefinitions( { @@ -391,105 +365,6 @@ def generate_workflows(ctx: Context): } ) - build_ci_deps_listing = { - "linux": [ - ("x86_64", "centos-7"), - ("arm64", "centos-7-arm64"), - ], - "macos": [ - ("x86_64", "macos-12"), - ("arm64", "macos-13-arm64"), - ], - "windows": [ - ("amd64", "windows-2022"), - ], - } - test_salt_pkg_downloads_listing = PlatformDefinitions( - { - "linux": [], - "macos": [], - "windows": [], - } - ) - rpm_slugs = ( - "rockylinux", - "amazonlinux", - "centos", - "fedora", - "photon", - ) - linux_skip_pkg_download_tests = ( - "archlinux-lts", - "opensuse-15", - "windows", - ) - for slug in sorted(tools.utils.get_golden_images()): - if slug.startswith(linux_skip_pkg_download_tests): - continue - if "arm64" in slug: - arch = "arm64" - else: - arch = "x86_64" - if slug.startswith(rpm_slugs) and arch == "arm64": - # While we maintain backwards compatible urls - test_salt_pkg_downloads_listing["linux"].append( - Linux( - slug=slug, - arch="aarch64", - pkg_type="package", - ) - ) - test_salt_pkg_downloads_listing["linux"].append( - Linux( - slug=slug, - arch=arch, - pkg_type="package", - ) - ) - if slug.startswith("ubuntu-22"): - test_salt_pkg_downloads_listing["linux"].append( - Linux( - slug=slug, - arch=arch, - pkg_type="onedir", - ) - ) - for mac in test_salt_listing["macos"]: - test_salt_pkg_downloads_listing["macos"].append( - MacOS( - slug=mac.slug, - arch=mac.arch, - display_name=mac.display_name, - pkg_type="package", - runner=mac.runner, - ) - ) - for mac in test_salt_listing["macos"][-1:]: - test_salt_pkg_downloads_listing["macos"].append( - MacOS( - slug=mac.slug, - arch=mac.arch, - display_name=mac.display_name, - pkg_type="onedir", - runner=mac.runner, - ) - ) - for win in test_salt_listing["windows"][-1:]: - for pkg_type in ("nsis", "msi", "onedir"): - test_salt_pkg_downloads_listing["windows"].append( - Windows( - slug=win.slug, - arch=win.arch, - display_name=win.display_name, - pkg_type=pkg_type, - ) - ) - - test_salt_pkg_downloads_needs_slugs = {"build-ci-deps"} - # for platform in 
test_salt_pkg_downloads_listing: - # for _, arch, _ in test_salt_pkg_downloads_listing[platform]: - # test_salt_pkg_downloads_needs_slugs.add("build-ci-deps") - build_rpms_listing = [] rpm_os_versions: dict[str, list[str]] = { "amazon": [], @@ -563,13 +438,8 @@ def generate_workflows(ctx: Context): "test_repo_needs": NeedsTracker(), "prepare_workflow_needs": NeedsTracker(), "build_repo_needs": NeedsTracker(), - "test_salt_listing": test_salt_listing, + "test_salt_listing": TEST_SALT_LISTING, "test_salt_pkg_listing": test_salt_pkg_listing, - "build_ci_deps_listing": build_ci_deps_listing, - "test_salt_pkg_downloads_listing": test_salt_pkg_downloads_listing, - "test_salt_pkg_downloads_needs_slugs": sorted( - test_salt_pkg_downloads_needs_slugs - ), "build_rpms_listing": build_rpms_listing, "build_debs_listing": build_debs_listing, } diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py index 3635c82d05b..7588a183eef 100644 --- a/tools/utils/__init__.py +++ b/tools/utils/__init__.py @@ -24,6 +24,11 @@ from rich.progress import ( TransferSpeedColumn, ) +if sys.version_info < (3, 11): + from typing_extensions import TypedDict +else: + from typing import TypedDict # pylint: disable=no-name-in-module + REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023" SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "test" @@ -42,10 +47,21 @@ class ExitCode(IntEnum): class OS: platform: str = attr.ib() slug: str = attr.ib() + arch: str = attr.ib() display_name: str = attr.ib(default=None) - arch: str = attr.ib(default=None) pkg_type: str = attr.ib(default=None) + @arch.default + def _default_arch(self): + return self._get_default_arch() + + def _get_default_arch(self): + if "aarch64" in self.slug: + return "arm64" + if "arm64" in self.slug: + return "arm64" + return "x86_64" + @attr.s(frozen=True, slots=True) class Linux(OS): @@ -67,6 +83,15 @@ class MacOS(OS): class Windows(OS): platform: str = attr.ib(default="windows") + def _get_default_arch(self): + return "amd64" + + +class PlatformDefinitions(TypedDict): + linux: list[Linux] + macos: list[MacOS] + windows: list[Windows] + def create_progress_bar(file_progress: bool = False, **kwargs): if file_progress: