From 798787e884694aa48d9703ce4348eadaea390f4d Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 21 Jun 2023 17:58:17 -0400 Subject: [PATCH 01/73] Lock to `immutables>0.16`, as that version has problems installing now --- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/lint.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/lint.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/lint.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/lint.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 3fc7facf84f..ed2c9899611 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -441,7 +441,7 @@ idna==2.8 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==6.0.0 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index b3552664df1..9868bef0f40 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -438,7 +438,7 @@ idna==3.2 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==6.0.0 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 3a99b696d2b..4cf6301ebb4 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -453,7 +453,7 @@ idna==2.8 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==4.8.1 # via diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index ec6face4f99..64f47a07f5b 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ 
b/requirements/static/ci/py3.7/lint.txt @@ -452,7 +452,7 @@ idna==3.2 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==4.6.4 # via diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 7471be1d614..1a7eff61340 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -451,7 +451,7 @@ idna==2.8 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==4.8.1 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index f4d6e66b243..425922348d5 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -450,7 +450,7 @@ idna==3.2 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==4.6.4 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index e0f1d9f3189..5e044b7ac97 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -451,7 +451,7 @@ idna==2.8 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==6.0.0 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 5509a1c5feb..2cc6daf69c6 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -448,7 +448,7 @@ idna==3.2 # etcd3-py # requests # yarl -immutables==0.16 +immutables==0.19 # via contextvars importlib-metadata==6.0.0 # via -r requirements/static/pkg/linux.in From ef7082126da5d57075c8e06747c4e0d05b416a24 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 5 Jul 2023 09:37:12 +0100 Subject: [PATCH 02/73] Switch to Debian 12 to build packages. 
The latest cryptography 1.41.1 requires a version of rustc which is not available on Debian 11 Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 11 +++++ requirements/static/ci/py3.11/tools.txt | 58 +++++++++++++++++++++++++ 2 files changed, 69 insertions(+) create mode 100644 requirements/static/ci/py3.11/tools.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 23240c88e00..dd68eb47bed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1021,6 +1021,17 @@ repos: - --py-version=3.10 - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - requirements/static/ci/tools.in + + - id: pip-tools-compile + alias: compile-ci-tools-3.11-requirements + name: Linux CI Py3.11 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.11/(tools|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.11 + - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - requirements/static/ci/tools.in # <---- Tools ----------------------------------------------------------------------------------------------------- # ----- Code Formatting -------------------------------------------------------------------------------------------> diff --git a/requirements/static/ci/py3.11/tools.txt b/requirements/static/ci/py3.11/tools.txt new file mode 100644 index 00000000000..bdd05d1f2ee --- /dev/null +++ b/requirements/static/ci/py3.11/tools.txt @@ -0,0 +1,58 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.11/tools.txt --pip-args='--constraint=requirements/static/ci/py3.11/linux.txt' requirements/static/ci/tools.in +# +attrs==22.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.21.46 + # via -r requirements/static/ci/tools.in +botocore==1.24.46 + # via + # boto3 + # s3transfer +certifi==2022.12.7 + # via requests +charset-normalizer==3.0.1 + # via requests 
+commonmark==0.9.1 + # via rich +idna==3.4 + # via requests +jinja2==3.1.2 + # via -r requirements/static/ci/tools.in +jmespath==1.0.1 + # via + # boto3 + # botocore +markupsafe==2.1.2 + # via jinja2 +packaging==23.0 + # via -r requirements/static/ci/tools.in +pygments==2.13.0 + # via rich +python-dateutil==2.8.2 + # via botocore +python-tools-scripts==0.12.0 + # via -r requirements/static/ci/tools.in +pyyaml==6.0 + # via -r requirements/static/ci/tools.in +requests==2.31.0 + # via + # python-tools-scripts + # virustotal3 +rich==12.5.1 + # via python-tools-scripts +s3transfer==0.5.2 + # via boto3 +six==1.16.0 + # via python-dateutil +urllib3==1.26.12 + # via + # botocore + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools.in From 7cd5ad6a3193fed96c9928ce6789c05fd8e02520 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 14 Jul 2023 19:13:45 +0100 Subject: [PATCH 03/73] Update `.github/`, `cicd/`, `pkg/` and `tools/` to latest 3006.x Signed-off-by: Pedro Algarvio --- .../setup-python-tools-scripts/action.yml | 7 +- .github/workflows/build-deb-packages.yml | 12 +- .github/workflows/build-deps-onedir.yml | 24 +- .github/workflows/build-macos-packages.yml | 38 +- .github/workflows/build-rpm-packages.yml | 10 +- .github/workflows/build-salt-onedir.yml | 24 +- .github/workflows/build-windows-packages.yml | 43 +- .github/workflows/ci.yml | 236 +- .github/workflows/nightly.yml | 206 +- .github/workflows/release.yml | 331 +-- .github/workflows/scheduled.yml | 146 +- .github/workflows/staging.yml | 461 ++-- .../templates/build-packages.yml.jinja | 2 + .github/workflows/templates/ci.yml.jinja | 19 +- .github/workflows/templates/layout.yml.jinja | 67 +- .github/workflows/templates/nightly.yml.jinja | 145 ++ .github/workflows/templates/release.yml.jinja | 13 +- .github/workflows/templates/staging.yml.jinja | 13 +- .../test-pkg-repo-downloads.yml.jinja | 85 +- .../templates/test-salt-pkg.yml.jinja | 10 +- .../workflows/templates/test-salt.yml.jinja | 
10 +- .../trigger-branch-workflows.yml.jinja | 3 +- .../workflow-requirements-check.yml.jinja | 2 +- .github/workflows/test-action-macos.yml | 50 +- .github/workflows/test-action.yml | 50 +- .../test-package-downloads-action-linux.yml | 14 +- .../test-package-downloads-action-macos.yml | 10 +- .../test-package-downloads-action-windows.yml | 12 +- .../workflows/test-packages-action-macos.yml | 16 +- .github/workflows/test-packages-action.yml | 18 +- cicd/amis.yml | 2 +- cicd/golden-images.json | 144 +- cicd/shared-gh-workflows-context.yml | 4 +- pkg/debian/rules | 8 +- pkg/debian/salt-cloud.postinst | 3 +- pkg/macos/build_python.sh | 65 +- pkg/rpm/salt.spec | 8 +- pkg/tests/conftest.py | 19 + pkg/tests/download/test_pkg_download.py | 345 ++- pkg/tests/integration/test_multi_minion.py | 127 ++ pkg/tests/integration/test_pkg.py | 7 - pkg/tests/integration/test_salt_user.py | 5 +- pkg/tests/support/helpers.py | 67 +- pkg/tests/upgrade/test_salt_upgrade.py | 1 + pkg/windows/build.ps1 | 19 +- pkg/windows/build_python.ps1 | 19 +- pkg/windows/clean.ps1 | 27 + pkg/windows/multi-minion.cmd | 5 + pkg/windows/multi-minion.ps1 | 363 ++++ pkg/windows/prep_salt.ps1 | 20 + tools/__init__.py | 2 + tools/ci.py | 393 +++- tools/pkg/build.py | 190 +- tools/pkg/repo.py | 1906 ----------------- tools/pkg/repo/__init__.py | 181 ++ tools/pkg/repo/create.py | 1038 +++++++++ tools/pkg/repo/publish.py | 653 ++++++ tools/utils.py | 141 +- tools/vm.py | 75 +- 59 files changed, 4660 insertions(+), 3254 deletions(-) create mode 100644 pkg/tests/integration/test_multi_minion.py create mode 100644 pkg/windows/multi-minion.cmd create mode 100644 pkg/windows/multi-minion.ps1 delete mode 100644 tools/pkg/repo.py create mode 100644 tools/pkg/repo/__init__.py create mode 100644 tools/pkg/repo/create.py create mode 100644 tools/pkg/repo/publish.py diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index dcd46feb2b0..72bcf3b1d37 
100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -33,7 +33,12 @@ runs: shell: bash working-directory: ${{ inputs.cwd }} run: | - python3 -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + (python3 -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 + if [ $exitcode -eq 0 ]; then + python3 -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + else + python3 -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + fi - name: Get 'python-tools-scripts' Version id: get-version diff --git a/.github/workflows/build-deb-packages.yml b/.github/workflows/build-deb-packages.yml index 3823a620ed1..42f7f4eb6e7 100644 --- a/.github/workflows/build-deb-packages.yml +++ b/.github/workflows/build-deb-packages.yml @@ -8,6 +8,14 @@ on: type: string required: true description: The Salt version to set prior to building packages. + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. 
+ python-version: + required: true + type: string + description: The version of python to use with relenv jobs: build: @@ -27,7 +35,7 @@ jobs: - src container: - image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-11 + image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12 steps: # Checkout here so we can easily use custom actions @@ -80,7 +88,7 @@ jobs: - name: Build Deb working-directory: pkgs/checkout/ run: | - tools pkg build deb ${{ + tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ matrix.source == 'onedir' && format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch) || diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index ad788929cf9..6197ebc0333 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -23,21 +23,11 @@ on: type: string default: 0.12.3 description: The version of relenv to use - python-version-linux: + python-version: required: false type: string default: 3.10.9 description: The version of python to use with relenv - python-version-macos: - required: false - type: string - default: 3.10.9 - description: The version of python to use with relenv - python-version-windows: - required: false - type: string - description: The version of python to use with relenv on Windows - default: 3.10.9 env: RELENV_DATA: "${{ github.workspace }}/.relenv" @@ -67,7 +57,7 @@ jobs: arch: ${{ matrix.arch }} version: ${{ inputs.relenv-version }} cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-linux }} + python-version: ${{ inputs.python-version }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -77,7 +67,7 @@ jobs: with: platform: linux arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version-linux }}" + python-version: "${{ inputs.python-version }}" cache-prefix: ${{ 
inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} @@ -108,7 +98,7 @@ jobs: arch: ${{ matrix.arch }} version: ${{ inputs.relenv-version }} cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-windows }} + python-version: ${{ inputs.python-version }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -118,7 +108,7 @@ jobs: with: platform: windows arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version-windows }}" + python-version: "${{ inputs.python-version }}" cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} @@ -148,7 +138,7 @@ jobs: arch: ${{ matrix.arch }} version: ${{ inputs.relenv-version }} cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-macos }} + python-version: ${{ inputs.python-version }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -158,5 +148,5 @@ jobs: with: platform: darwin arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version-macos }}" + python-version: "${{ inputs.python-version }}" cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-macos-packages.yml b/.github/workflows/build-macos-packages.yml index 9e07834fea0..285dca415cb 100644 --- a/.github/workflows/build-macos-packages.yml +++ b/.github/workflows/build-macos-packages.yml @@ -8,6 +8,14 @@ on: type: string required: true description: The Salt version to set prior to building packages. + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. 
+ python-version: + required: true + type: string + description: The version of python to use with relenv sign-packages: type: boolean default: false @@ -27,6 +35,10 @@ jobs: matrix: arch: - x86_64 + source: + - onedir + - src + runs-on: - macos-12 steps: @@ -100,15 +112,31 @@ jobs: APPLE_ACCT: "${{ secrets.MAC_SIGN_APPLE_ACCT }}" APP_SPEC_PWD: "${{ secrets.MAC_SIGN_APP_SPEC_PWD }}" run: | - tools pkg build macos --onedir salt-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz \ - --salt-version ${{ inputs.salt-version }} ${{ - steps.check-pkg-sign.outputs.sign-pkgs == 'true' && '--sign' || '' - }} + tools pkg build macos --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ + matrix.source == 'onedir' && + format( + '--onedir salt-{0}-onedir-darwin-{1}.tar.xz --salt-version {0} {2}', + inputs.salt-version, + matrix.arch, + steps.check-pkg-sign.outputs.sign-pkgs == 'true' && '--sign' || '' + ) + || + format('--salt-version {0}', inputs.salt-version) + }} + + - name: Set Artifact Name + id: set-artifact-name + run: | + if [ "${{ matrix.source }}" != "src" ]; then + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos" >> "$GITHUB_OUTPUT" + else + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos-from-src" >> "$GITHUB_OUTPUT" + fi - name: Upload ${{ matrix.arch }} Package uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos + name: ${{ steps.set-artifact-name.outputs.artifact-name }} path: pkg/macos/salt-${{ inputs.salt-version }}-py3-*.pkg retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index 72464818307..c9ef9cc8abe 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -8,6 +8,14 @@ on: type: string required: true description: The Salt version to set prior to building 
packages. + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. + python-version: + required: true + type: string + description: The version of python to use with relenv env: COLUMNS: 190 @@ -68,7 +76,7 @@ jobs: - name: Build RPM run: | - tools pkg build rpm ${{ + tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ matrix.source == 'onedir' && format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch) || diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 2b1b758b42f..972176c8cee 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -23,21 +23,11 @@ on: type: string default: 0.12.3 description: The version of relenv to use - python-version-linux: + python-version: required: false type: string default: 3.10.9 description: The version of python to use with relenv - python-version-macos: - required: false - type: string - default: 3.10.9 - description: The version of python to use with relenv - python-version-windows: - required: false - type: string - description: The version of python to use with relenv on Windows - default: 3.10.9 env: RELENV_DATA: "${{ github.workspace }}/.relenv" @@ -77,7 +67,7 @@ jobs: arch: ${{ matrix.arch }} version: ${{ inputs.relenv-version }} cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-linux }} + python-version: ${{ inputs.python-version }} - name: Install Salt into Relenv Onedir uses: ./.github/actions/build-onedir-salt @@ -85,7 +75,7 @@ jobs: platform: linux arch: ${{ matrix.arch }} salt-version: "${{ inputs.salt-version }}" - python-version: "${{ inputs.python-version-linux }}" + python-version: "${{ inputs.python-version }}" cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} @@ -115,7 +105,7 @@ jobs: arch: ${{ 
matrix.arch }} version: ${{ inputs.relenv-version }} cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-windows }} + python-version: ${{ inputs.python-version }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -132,7 +122,7 @@ jobs: platform: windows arch: ${{ matrix.arch }} salt-version: "${{ inputs.salt-version }}" - python-version: "${{ inputs.python-version-windows }}" + python-version: "${{ inputs.python-version }}" cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} @@ -161,7 +151,7 @@ jobs: arch: ${{ matrix.arch }} version: ${{ inputs.relenv-version }} cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-macos }} + python-version: ${{ inputs.python-version }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -178,5 +168,5 @@ jobs: platform: darwin arch: ${{ matrix.arch }} salt-version: "${{ inputs.salt-version }}" - python-version: "${{ inputs.python-version-macos }}" + python-version: "${{ inputs.python-version }}" cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-windows-packages.yml b/.github/workflows/build-windows-packages.yml index b50d7cdc618..8b3173fe415 100644 --- a/.github/workflows/build-windows-packages.yml +++ b/.github/workflows/build-windows-packages.yml @@ -8,6 +8,14 @@ on: type: string required: true description: The Salt version to set prior to building packages + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. 
+ python-version: + required: true + type: string + description: The version of python to use with relenv sign-packages: type: boolean default: false @@ -29,6 +37,10 @@ jobs: arch: - x86 - amd64 + source: + - onedir + - src + runs-on: - windows-latest env: @@ -95,15 +107,34 @@ jobs: - name: Build Windows Packages run: | - tools pkg build windows --onedir salt-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.zip ` - --salt-version ${{ inputs.salt-version }} --arch ${{ matrix.arch }} ${{ + tools pkg build windows --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ + matrix.source == 'onedir' && + format( + '--onedir salt-{0}-onedir-windows-{1}.zip --salt-version {0} --arch {1} {2}', + inputs.salt-version, + matrix.arch, steps.check-pkg-sign.outputs.sign-pkgs == 'true' && '--sign' || '' - }} + ) + || + format('--salt-version {0} --arch {1}', inputs.salt-version, matrix.arch) + }} - - name: Upload ${{ matrix.arch }} Packages + - name: Set Artifact Name + id: set-artifact-name + shell: bash + run: | + if [ "${{ matrix.source }}" != "src" ]; then + echo "artifact-name-nsis=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS" >> "$GITHUB_OUTPUT" + echo "artifact-name-msi=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI" >> "$GITHUB_OUTPUT" + else + echo "artifact-name-nsis=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS-from-src" >> "$GITHUB_OUTPUT" + echo "artifact-name-msi=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI-from-src" >> "$GITHUB_OUTPUT" + fi + + - name: Upload ${{ matrix.arch }} NSIS Packages uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS + name: ${{ steps.set-artifact-name.outputs.artifact-name-nsis }} path: pkg/windows/build/Salt-*.exe retention-days: 7 if-no-files-found: error @@ -111,7 +142,7 @@ jobs: - name: Upload ${{ matrix.arch }} MSI Package uses: actions/upload-artifact@v3 with: - name: salt-${{ 
inputs.salt-version }}-${{ matrix.arch }}-MSI + name: ${{ steps.set-artifact-name.outputs.artifact-name-msi }} path: pkg/windows/build/Salt-*.msi retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a18e21fcc5d..b525d11a150 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,9 +3,16 @@ --- name: CI run-name: "CI (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('{0}: {1}', startsWith(github.event.ref, 'refs/tags') && 'tag' || 'branch', github.ref_name) }})" + on: push: {} - pull_request: {} + pull_request: + types: + - labeled + - unlabeled + - opened + - reopened + - synchronize env: COLUMNS: 190 @@ -28,7 +35,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} @@ -39,6 +46,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - uses: actions/checkout@v3 with: @@ -132,6 +140,11 @@ jobs: run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -139,35 +152,13 @@ jobs: salt-version: "" validate-version: true - - name: Get Pull Number - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-number - uses: ./.github/actions/get-pull-number - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - sha: ${{ github.sha }} - pull-number: ${{ 
github.event.pull_request.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Request - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-request - uses: ./.github/actions/get-pull-request - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - pull-number: ${{ steps.get-pull-number.outputs.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Labels - if: ${{ github.event_name == 'pull_request' }} + - name: Get Pull Request Test Labels id: get-pull-labels - uses: ./.github/actions/get-pull-labels - with: - pull-request: ${{ steps.get-pull-request.outputs.pull-request }} + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} - name: Write Changed Files To A Local File run: @@ -213,10 +204,18 @@ jobs: run: | tools ci get-releases + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Salt Releases run: | echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' 
- name: Define Testrun id: define-testrun @@ -238,11 +237,6 @@ jobs: with: name: testrun-changed-files.txt path: testrun-changed-files.txt - - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -266,8 +260,7 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: @@ -401,7 +394,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 @@ -449,9 +442,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-salt-onedir: name: Build Salt Onedir @@ -467,9 +458,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-rpm-pkgs: name: Build RPM Packages @@ -480,6 +469,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-deb-pkgs: name: Build DEB Packages @@ -490,6 +481,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-windows-pkgs: name: Build Windows Packages @@ -500,6 +493,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-macos-pkgs: name: Build macOS Packages @@ -510,6 +505,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Tests @@ -525,8 +522,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centos-7-pkg-tests: name: CentOS 7 Package Tests @@ -542,8 +540,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-8-pkg-tests: name: CentOS Stream 8 Package Tests @@ -559,8 
+558,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-9-pkg-tests: name: CentOS Stream 9 Package Tests @@ -576,8 +576,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-10-pkg-tests: name: Debian 10 Package Tests @@ -593,8 +594,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-pkg-tests: name: Debian 11 Package Tests @@ -610,8 +612,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' 
}} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Tests @@ -627,8 +630,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-3-pkg-tests: name: Photon OS 3 Package Tests @@ -644,8 +648,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-4-pkg-tests: name: Photon OS 4 Package Tests @@ -661,8 +666,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Tests @@ -678,8 +684,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + 
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Tests @@ -695,8 +702,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Tests @@ -712,8 +720,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Tests @@ -729,8 +738,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} macos-12-pkg-tests: name: macOS 12 Package Tests @@ -746,8 +756,9 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Tests @@ -763,8 +774,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Tests @@ -780,8 +792,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Tests @@ -797,8 +810,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 
'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Tests @@ -814,8 +828,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Tests @@ -831,8 +846,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: name: Windows 2022 MSI Package Tests @@ -848,8 +864,9 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: name: Windows 2016 @@ -866,8 +883,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - 
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} windows-2019: @@ -885,8 +901,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} windows-2022: @@ -904,8 +919,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} macos-12: @@ -923,8 +937,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} almalinux-8: @@ -942,8 +955,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} almalinux-9: @@ -961,8 +973,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} amazonlinux-2: @@ -980,8 +991,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} archlinux-lts: @@ -999,8 +1009,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} 
centos-7: @@ -1018,8 +1027,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} centosstream-8: @@ -1037,8 +1045,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} centosstream-9: @@ -1056,8 +1063,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} debian-10: @@ -1075,8 +1081,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} debian-11: @@ -1094,8 +1099,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} debian-11-arm64: @@ -1113,27 +1117,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} - skip-junit-reports: ${{ github.event_name == 'pull_request' }} - - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} fedora-37: @@ -1151,8 +1135,7 @@ jobs: testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} fedora-38: @@ -1170,8 +1153,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} opensuse-15: @@ -1189,8 +1171,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} photonos-3: @@ -1208,8 +1189,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} 
skip-junit-reports: ${{ github.event_name == 'pull_request' }} photonos-4: @@ -1227,8 +1207,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} ubuntu-2004: @@ -1246,8 +1225,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} ubuntu-2004-arm64: @@ -1265,8 +1243,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} ubuntu-2204: @@ -1284,8 +1261,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 
'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} ubuntu-2204-arm64: @@ -1303,8 +1279,7 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} set-pipeline-exit-status: @@ -1312,7 +1287,7 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow - pre-commit @@ -1334,7 +1309,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 8291efe30fa..73876713240 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -38,7 +38,7 @@ jobs: workflow-requirements: name: Check Workflow Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} steps: @@ -65,8 +65,7 @@ jobs: trigger-branch-nightly-builds: name: Trigger Branch Workflows if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", 
"x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements steps: @@ -78,7 +77,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} if: ${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }} needs: - workflow-requirements @@ -92,6 +91,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - uses: actions/checkout@v3 with: @@ -185,6 +185,11 @@ jobs: run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -192,35 +197,13 @@ jobs: salt-version: "" validate-version: true - - name: Get Pull Number - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-number - uses: ./.github/actions/get-pull-number - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - sha: ${{ github.sha }} - pull-number: ${{ github.event.pull_request.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Request - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-request - uses: ./.github/actions/get-pull-request - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - pull-number: ${{ steps.get-pull-number.outputs.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Labels - if: ${{ github.event_name == 'pull_request' }} + - name: Get Pull Request Test Labels id: get-pull-labels - uses: ./.github/actions/get-pull-labels - with: - pull-request: ${{ 
steps.get-pull-request.outputs.pull-request }} + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} - name: Write Changed Files To A Local File run: @@ -266,10 +249,18 @@ jobs: run: | tools ci get-releases + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Salt Releases run: | echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' - name: Define Testrun id: define-testrun @@ -291,11 +282,6 @@ jobs: with: name: testrun-changed-files.txt path: testrun-changed-files.txt - - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -319,8 +305,7 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: @@ -459,7 +444,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 @@ -507,9 +492,7 @@ jobs: 
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-salt-onedir: name: Build Salt Onedir @@ -525,9 +508,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-rpm-pkgs: name: Build RPM Packages @@ -538,6 +519,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-deb-pkgs: name: Build DEB Packages @@ -548,6 +531,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-windows-pkgs: name: Build Windows Packages @@ -558,6 +543,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" environment: nightly sign-packages: false secrets: inherit @@ -571,6 +558,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" environment: nightly sign-packages: true secrets: inherit @@ -591,6 +580,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} centos-7-pkg-tests: name: CentOS 7 Package Tests @@ -608,6 +598,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-8-pkg-tests: name: CentOS Stream 8 Package Tests @@ -625,6 +616,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-9-pkg-tests: name: CentOS Stream 9 Package Tests @@ -642,6 +634,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-10-pkg-tests: name: Debian 10 Package Tests @@ -659,6 +652,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-pkg-tests: name: Debian 11 Package Tests @@ -676,6 +670,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Tests @@ -693,6 +688,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-3-pkg-tests: name: Photon OS 3 Package Tests @@ -710,6 +706,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} photonos-4-pkg-tests: name: Photon OS 4 Package Tests @@ -727,6 +724,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Tests @@ -744,6 +742,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Tests @@ -761,6 +760,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Tests @@ -778,6 +778,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Tests @@ -795,6 +796,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} macos-12-pkg-tests: name: macOS 12 Package Tests @@ -812,6 +814,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Tests @@ -829,6 +832,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Tests @@ -846,6 +850,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Tests @@ -863,6 +868,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Tests @@ -880,6 +886,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Tests @@ -897,6 +904,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: name: Windows 2022 MSI Package Tests @@ -914,6 +922,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: name: Windows 2016 @@ -930,7 +939,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -949,7 +957,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -968,7 +975,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -987,7 +993,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1006,7 +1011,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1025,7 +1029,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1044,7 +1047,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1063,7 +1065,6 @@ jobs: testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1082,7 +1083,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1101,7 +1101,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1120,7 +1119,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1139,7 +1137,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1158,7 +1155,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1177,26 
+1173,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: false - skip-junit-reports: false - - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1215,7 +1191,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1234,7 +1209,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1253,7 +1227,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false 
skip-junit-reports: false @@ -1272,7 +1245,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1291,7 +1263,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1310,7 +1281,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1329,7 +1299,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1348,7 +1317,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1367,7 +1335,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels 
}} skip-code-coverage: false skip-junit-reports: false @@ -2030,7 +1997,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 @@ -2080,7 +2046,8 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + environment: nightly needs: - workflow-requirements - trigger-branch-nightly-builds @@ -2116,6 +2083,65 @@ jobs: id: get-workflow-info uses: technote-space/workflow-conclusion-action@v3 + - name: Notify Slack + id: slack + if: always() + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + { + "attachments": [ + { + "fallback": "${{ github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}", + "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}", + "fields": [ + { + "title": "Workflow", + "short": true, + "value": "${{ github.workflow }}", + "type": "mrkdwn" + }, + { + "title": "Workflow Run", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>", + "type": "mrkdwn" + }, + { + "title": "Branch", + "short": true, + "value": "${{ github.ref_name }}", + "type": "mrkdwn" + }, + { + "title": "Commit", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>", + "type": "mrkdwn" + }, + { + "title": "Attempt", + "short": true, + "value": "${{ github.run_attempt }}", + "type": "mrkdwn" + }, + { + "title": "Status", + "short": true, + "value": "${{ steps.get-workflow-info.outputs.conclusion }}", + "type": "mrkdwn" + } 
+ ], + "author_name": "${{ github.event.sender.login }}", + "author_link": "${{ github.event.sender.html_url }}", + "author_icon": "${{ github.event.sender.avatar_url }}" + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + - name: Set Pipeline Exit Status shell: bash run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0dd338bdd8a..28ec5fa1366 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -35,7 +35,7 @@ jobs: check-requirements: name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: release-check steps: - name: Check For Admin Permission @@ -44,17 +44,6 @@ jobs: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - prepare-workflow: name: Prepare Workflow Run runs-on: @@ -231,8 +220,8 @@ jobs: run: | tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }} - almalinux-8-pkg-download-tests: - name: Test Alma Linux 8 Package Downloads + almalinux-8-package-download-tests: + name: Test Alma Linux 8 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -248,10 +237,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - almalinux-8-arm64-pkg-download-tests: - name: Test Alma Linux 8 Arm64 Package Downloads + 
almalinux-8-arm64-package-download-tests: + name: Test Alma Linux 8 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -267,10 +257,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - almalinux-9-pkg-download-tests: - name: Test Alma Linux 9 Package Downloads + almalinux-9-package-download-tests: + name: Test Alma Linux 9 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -286,10 +277,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - almalinux-9-arm64-pkg-download-tests: - name: Test Alma Linux 9 Arm64 Package Downloads + almalinux-9-arm64-package-download-tests: + name: Test Alma Linux 9 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -305,10 +297,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - amazonlinux-2-pkg-download-tests: - name: Test Amazon Linux 2 Package Downloads + amazonlinux-2-package-download-tests: + name: Test Amazon Linux 2 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -324,10 +317,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - amazonlinux-2-arm64-pkg-download-tests: - name: Test Amazon Linux 2 Arm64 Package Downloads + amazonlinux-2-arm64-package-download-tests: + name: Test Amazon Linux 2 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -343,10 +337,11 @@ jobs: environment: 
release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centos-7-pkg-download-tests: - name: Test CentOS 7 Package Downloads + centos-7-package-download-tests: + name: Test CentOS 7 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -362,10 +357,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centos-7-arm64-pkg-download-tests: - name: Test CentOS 7 Arm64 Package Downloads + centos-7-arm64-package-download-tests: + name: Test CentOS 7 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -381,10 +377,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-8-pkg-download-tests: - name: Test CentOS Stream 8 Package Downloads + centosstream-8-package-download-tests: + name: Test CentOS Stream 8 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -400,10 +397,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-8-arm64-pkg-download-tests: - name: Test CentOS Stream 8 Arm64 Package Downloads + centosstream-8-arm64-package-download-tests: + name: Test CentOS Stream 8 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -419,10 +417,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-9-pkg-download-tests: - name: Test CentOS Stream 9 Package Downloads + 
centosstream-9-package-download-tests: + name: Test CentOS Stream 9 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -438,10 +437,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-9-arm64-pkg-download-tests: - name: Test CentOS Stream 9 Arm64 Package Downloads + centosstream-9-arm64-package-download-tests: + name: Test CentOS Stream 9 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -457,10 +457,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - debian-10-pkg-download-tests: - name: Test Debian 10 Package Downloads + debian-10-package-download-tests: + name: Test Debian 10 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -476,10 +477,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - debian-11-pkg-download-tests: - name: Test Debian 11 Package Downloads + debian-11-package-download-tests: + name: Test Debian 11 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -495,10 +497,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - debian-11-arm64-pkg-download-tests: - name: Test Debian 11 Arm64 Package Downloads + debian-11-arm64-package-download-tests: + name: Test Debian 11 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -514,48 +517,11 @@ jobs: environment: release skip-code-coverage: true 
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-36-pkg-download-tests: - name: Test Fedora 36 Package Downloads - if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - needs: - - prepare-workflow - - publish-repositories - - download-onedir-artifact - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36 - platform: linux - arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: release - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - - fedora-36-arm64-pkg-download-tests: - name: Test Fedora 36 Arm64 Package Downloads - if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - needs: - - prepare-workflow - - publish-repositories - - download-onedir-artifact - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36-arm64 - platform: linux - arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: release - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - - fedora-37-pkg-download-tests: - name: Test Fedora 37 Package Downloads + fedora-37-package-download-tests: + name: Test Fedora 37 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -571,10 +537,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-37-arm64-pkg-download-tests: - name: Test Fedora 37 Arm64 Package Downloads + fedora-37-arm64-package-download-tests: + name: Test Fedora 37 Arm64 package Downloads 
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -590,10 +557,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-38-pkg-download-tests: - name: Test Fedora 38 Package Downloads + fedora-38-package-download-tests: + name: Test Fedora 38 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -609,10 +577,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-38-arm64-pkg-download-tests: - name: Test Fedora 38 Arm64 Package Downloads + fedora-38-arm64-package-download-tests: + name: Test Fedora 38 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -628,10 +597,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - photonos-3-pkg-download-tests: - name: Test Photon OS 3 Package Downloads + photonos-3-package-download-tests: + name: Test Photon OS 3 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -647,10 +617,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - photonos-4-pkg-download-tests: - name: Test Photon OS 4 Package Downloads + photonos-4-package-download-tests: + name: Test Photon OS 4 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -666,10 +637,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - 
ubuntu-2004-pkg-download-tests: - name: Test Ubuntu 20.04 Package Downloads + ubuntu-2004-package-download-tests: + name: Test Ubuntu 20.04 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -685,10 +657,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2004-arm64-pkg-download-tests: - name: Test Ubuntu 20.04 Arm64 Package Downloads + ubuntu-2004-arm64-package-download-tests: + name: Test Ubuntu 20.04 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -704,10 +677,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2204-pkg-download-tests: - name: Test Ubuntu 22.04 Package Downloads + ubuntu-2204-package-download-tests: + name: Test Ubuntu 22.04 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -723,10 +697,11 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2204-arm64-pkg-download-tests: - name: Test Ubuntu 22.04 Arm64 Package Downloads + ubuntu-2204-arm64-package-download-tests: + name: Test Ubuntu 22.04 Arm64 package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: - prepare-workflow @@ -742,9 +717,50 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - macos-12-pkg-download-tests: + ubuntu-2204-onedir-download-tests: + name: Test Ubuntu 22.04 onedir Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - 
download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: onedir + secrets: inherit + + ubuntu-2204-arm64-onedir-download-tests: + name: Test Ubuntu 22.04 Arm64 onedir Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: onedir + secrets: inherit + + macos-12-Package-download-tests: name: Test macOS 12 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} needs: @@ -761,9 +777,30 @@ jobs: environment: release skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - windows-2022-nsis-amd64-pkg-download-tests: + macos-12-Onedir-download-tests: + name: Test macOS 12 Onedir Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-macos.yml + with: + distro-slug: macos-12 + platform: darwin + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: onedir + secrets: inherit + + windows-2022-NSIS-amd64-download-tests: if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} name: Test Windows 2022 amd64 NSIS Package Downloads needs: @@ -775,7 +812,7 @@ jobs: distro-slug: windows-2022 platform: windows arch: amd64 - pkg-type: NSIS + pkg-type: nsis cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release @@ -783,7 +820,7 @@ jobs: latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit - windows-2022-msi-amd64-pkg-download-tests: + windows-2022-MSI-amd64-download-tests: if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} name: Test Windows 2022 amd64 MSI Package Downloads needs: @@ -795,7 +832,27 @@ jobs: distro-slug: windows-2022 platform: windows arch: amd64 - pkg-type: MSI + pkg-type: msi + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + windows-2022-Onedir-amd64-download-tests: + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + name: Test Windows 2022 amd64 Onedir Package Downloads + needs: + - prepare-workflow + - publish-repositories + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + pkg-type: onedir cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release @@ -814,36 +871,38 @@ jobs: - prepare-workflow - backup - 
publish-repositories - - almalinux-8-pkg-download-tests - - almalinux-8-arm64-pkg-download-tests - - almalinux-9-pkg-download-tests - - almalinux-9-arm64-pkg-download-tests - - amazonlinux-2-pkg-download-tests - - amazonlinux-2-arm64-pkg-download-tests - - centos-7-pkg-download-tests - - centos-7-arm64-pkg-download-tests - - centosstream-8-pkg-download-tests - - centosstream-8-arm64-pkg-download-tests - - centosstream-9-pkg-download-tests - - centosstream-9-arm64-pkg-download-tests - - debian-10-pkg-download-tests - - debian-11-pkg-download-tests - - debian-11-arm64-pkg-download-tests - - fedora-36-pkg-download-tests - - fedora-36-arm64-pkg-download-tests - - fedora-37-pkg-download-tests - - fedora-37-arm64-pkg-download-tests - - fedora-38-pkg-download-tests - - fedora-38-arm64-pkg-download-tests - - photonos-3-pkg-download-tests - - photonos-4-pkg-download-tests - - ubuntu-2004-pkg-download-tests - - ubuntu-2004-arm64-pkg-download-tests - - ubuntu-2204-pkg-download-tests - - ubuntu-2204-arm64-pkg-download-tests - - macos-12-pkg-download-tests - - windows-2022-nsis-amd64-pkg-download-tests - - windows-2022-msi-amd64-pkg-download-tests + - almalinux-8-package-download-tests + - almalinux-8-arm64-package-download-tests + - almalinux-9-package-download-tests + - almalinux-9-arm64-package-download-tests + - amazonlinux-2-package-download-tests + - amazonlinux-2-arm64-package-download-tests + - centos-7-package-download-tests + - centos-7-arm64-package-download-tests + - centosstream-8-package-download-tests + - centosstream-8-arm64-package-download-tests + - centosstream-9-package-download-tests + - centosstream-9-arm64-package-download-tests + - debian-10-package-download-tests + - debian-11-package-download-tests + - debian-11-arm64-package-download-tests + - fedora-37-package-download-tests + - fedora-37-arm64-package-download-tests + - fedora-38-package-download-tests + - fedora-38-arm64-package-download-tests + - photonos-3-package-download-tests + - 
photonos-4-package-download-tests + - ubuntu-2004-package-download-tests + - ubuntu-2004-arm64-package-download-tests + - ubuntu-2204-package-download-tests + - ubuntu-2204-arm64-package-download-tests + - ubuntu-2204-onedir-download-tests + - ubuntu-2204-arm64-onedir-download-tests + - macos-12-Package-download-tests + - macos-12-Onedir-download-tests + - windows-2022-NSIS-amd64-download-tests + - windows-2022-MSI-amd64-download-tests + - windows-2022-Onedir-amd64-download-tests environment: release steps: - name: Clone The Salt Repository @@ -998,7 +1057,7 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - check-requirements - prepare-workflow diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index fda566fbb3e..f61782b8a93 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -28,7 +28,7 @@ jobs: workflow-requirements: name: Check Workflow Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} steps: @@ -55,8 +55,7 @@ jobs: trigger-branch-scheduled-builds: name: Trigger Branch Workflows if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements steps: @@ -68,7 +67,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} if: ${{ 
fromJSON(needs.workflow-requirements.outputs.requirements-met) }} needs: - workflow-requirements @@ -82,6 +81,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - uses: actions/checkout@v3 with: @@ -175,6 +175,11 @@ jobs: run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -182,35 +187,13 @@ jobs: salt-version: "" validate-version: true - - name: Get Pull Number - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-number - uses: ./.github/actions/get-pull-number - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - sha: ${{ github.sha }} - pull-number: ${{ github.event.pull_request.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Request - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-request - uses: ./.github/actions/get-pull-request - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - pull-number: ${{ steps.get-pull-number.outputs.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Labels - if: ${{ github.event_name == 'pull_request' }} + - name: Get Pull Request Test Labels id: get-pull-labels - uses: ./.github/actions/get-pull-labels - with: - pull-request: ${{ steps.get-pull-request.outputs.pull-request }} + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} - name: Write Changed Files To A Local File run: @@ -256,10 +239,18 @@ jobs: run: | tools ci 
get-releases + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Salt Releases run: | echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' - name: Define Testrun id: define-testrun @@ -281,11 +272,6 @@ jobs: with: name: testrun-changed-files.txt path: testrun-changed-files.txt - - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -309,8 +295,7 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: @@ -444,7 +429,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 @@ -492,9 +477,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: 
"3.10.11" build-salt-onedir: name: Build Salt Onedir @@ -510,9 +493,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-rpm-pkgs: name: Build RPM Packages @@ -523,6 +504,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-deb-pkgs: name: Build DEB Packages @@ -533,6 +516,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-windows-pkgs: name: Build Windows Packages @@ -543,6 +528,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-macos-pkgs: name: Build macOS Packages @@ -553,6 +540,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Tests @@ -570,6 +559,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centos-7-pkg-tests: name: CentOS 7 Package Tests @@ -587,6 +577,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} 
centosstream-8-pkg-tests: name: CentOS Stream 8 Package Tests @@ -604,6 +595,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-9-pkg-tests: name: CentOS Stream 9 Package Tests @@ -621,6 +613,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-10-pkg-tests: name: Debian 10 Package Tests @@ -638,6 +631,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-pkg-tests: name: Debian 11 Package Tests @@ -655,6 +649,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Tests @@ -672,6 +667,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-3-pkg-tests: name: Photon OS 3 Package Tests @@ -689,6 +685,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-4-pkg-tests: name: Photon OS 4 Package Tests @@ -706,6 +703,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-pkg-tests: name: 
Ubuntu 20.04 Package Tests @@ -723,6 +721,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Tests @@ -740,6 +739,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Tests @@ -757,6 +757,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Tests @@ -774,6 +775,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} macos-12-pkg-tests: name: macOS 12 Package Tests @@ -791,6 +793,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Tests @@ -808,6 +811,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Tests @@ -825,6 +829,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: 
name: Windows 2019 NSIS Package Tests @@ -842,6 +847,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Tests @@ -859,6 +865,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Tests @@ -876,6 +883,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: name: Windows 2022 MSI Package Tests @@ -893,6 +901,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: name: Windows 2016 @@ -909,7 +918,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -928,7 +936,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -947,7 +954,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -966,7 +972,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -985,7 +990,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1004,7 +1008,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1023,7 +1026,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1042,7 +1044,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1061,7 +1062,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1080,7 +1080,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1099,7 +1098,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1118,7 +1116,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1137,7 +1134,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1156,26 +1152,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: false - skip-junit-reports: false - - fedora-36: - name: Fedora 36 - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1194,7 +1170,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1213,7 +1188,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1232,7 +1206,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1251,7 +1224,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1270,7 
+1242,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1289,7 +1260,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1308,7 +1278,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1327,7 +1296,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1346,7 +1314,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: false skip-junit-reports: false @@ -1355,7 +1322,7 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements - 
trigger-branch-scheduled-builds @@ -1379,7 +1346,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 53f5fd62454..c30b9877473 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -48,7 +48,7 @@ jobs: check-requirements: name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: staging-check steps: - name: Check For Admin Permission @@ -57,20 +57,9 @@ jobs: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - check-requirements outputs: @@ -83,6 +72,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - uses: actions/checkout@v3 with: @@ -176,6 +166,11 @@ jobs: run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -183,35 +178,13 @@ jobs: salt-version: "${{ 
inputs.salt-version }}" validate-version: true - - name: Get Pull Number - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-number - uses: ./.github/actions/get-pull-number - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - sha: ${{ github.sha }} - pull-number: ${{ github.event.pull_request.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Request - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-request - uses: ./.github/actions/get-pull-request - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - pull-number: ${{ steps.get-pull-number.outputs.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Labels - if: ${{ github.event_name == 'pull_request' }} + - name: Get Pull Request Test Labels id: get-pull-labels - uses: ./.github/actions/get-pull-labels - with: - pull-request: ${{ steps.get-pull-request.outputs.pull-request }} + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} - name: Check Existing Releases env: @@ -263,10 +236,18 @@ jobs: run: | tools ci get-releases + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Salt Releases run: | echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' 
- name: Define Testrun id: define-testrun @@ -288,11 +269,6 @@ jobs: with: name: testrun-changed-files.txt path: testrun-changed-files.txt - - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -455,7 +431,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 @@ -503,9 +479,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-salt-onedir: name: Build Salt Onedir @@ -521,9 +495,7 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "0.12.3" - python-version-linux: "3.10.11" - python-version-macos: "3.10.11" - python-version-windows: "3.10.11" + python-version: "3.10.11" build-rpm-pkgs: name: Build RPM Packages @@ -534,6 +506,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-deb-pkgs: name: Build DEB Packages @@ -544,6 +518,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" build-windows-pkgs: name: Build Windows Packages @@ -554,6 +530,8 @@ jobs: uses: 
./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" environment: staging sign-packages: ${{ inputs.sign-windows-packages }} secrets: inherit @@ -567,6 +545,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "0.12.3" + python-version: "3.10.11" environment: staging sign-packages: true secrets: inherit @@ -587,6 +567,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centos-7-pkg-tests: name: CentOS 7 Package Tests @@ -604,6 +585,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-8-pkg-tests: name: CentOS Stream 8 Package Tests @@ -621,6 +603,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} centosstream-9-pkg-tests: name: CentOS Stream 9 Package Tests @@ -638,6 +621,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-10-pkg-tests: name: Debian 10 Package Tests @@ -655,6 +639,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-pkg-tests: name: Debian 11 Package Tests @@ -672,6 +657,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Tests @@ -689,6 +675,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-3-pkg-tests: name: Photon OS 3 Package Tests @@ -706,6 +693,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-4-pkg-tests: name: Photon OS 4 Package Tests @@ -723,6 +711,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Tests @@ -740,6 +729,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Tests @@ -757,6 +747,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Tests @@ -774,6 +765,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Tests @@ -791,6 +783,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 
skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} macos-12-pkg-tests: name: macOS 12 Package Tests @@ -808,6 +801,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Tests @@ -825,6 +819,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Tests @@ -842,6 +837,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Tests @@ -859,6 +855,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Tests @@ -876,6 +873,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Tests @@ -893,6 +891,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: name: Windows 2022 MSI Package Tests @@ -910,6 +909,7 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 
skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: name: Windows 2016 @@ -926,7 +926,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -945,7 +944,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -964,7 +962,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -983,7 +980,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1002,7 +998,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1021,7 +1016,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1040,7 +1034,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1059,7 +1052,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1078,7 +1070,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1097,7 +1088,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1116,7 +1106,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1135,7 +1124,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1154,7 +1142,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1173,26 +1160,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: true - skip-junit-reports: true - - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1211,7 +1178,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1230,7 +1196,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1249,7 +1214,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1268,7 +1232,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1287,7 +1250,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1306,7 +1268,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1325,7 +1286,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1344,7 +1304,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -1363,7 +1322,6 @@ jobs: testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} skip-code-coverage: true skip-junit-reports: true @@ -2114,8 +2072,8 @@ jobs: retention-days: 7 if-no-files-found: error - almalinux-8-pkg-download-tests: - name: Test Alma Linux 8 Package Downloads + almalinux-8-package-download-tests: + name: Test Alma Linux 8 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2130,10 +2088,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - almalinux-8-arm64-pkg-download-tests: - name: Test Alma Linux 8 Arm64 Package Downloads + almalinux-8-arm64-package-download-tests: + name: Test Alma Linux 8 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2148,10 +2107,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - almalinux-9-pkg-download-tests: - name: Test Alma Linux 9 Package Downloads + almalinux-9-package-download-tests: + name: Test Alma Linux 9 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2166,10 +2126,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - almalinux-9-arm64-pkg-download-tests: - name: Test Alma Linux 9 Arm64 Package Downloads + almalinux-9-arm64-package-download-tests: + name: Test Alma Linux 9 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2184,10 +2145,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - amazonlinux-2-pkg-download-tests: - name: Test Amazon Linux 2 Package Downloads + amazonlinux-2-package-download-tests: + name: Test Amazon Linux 2 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2202,10 +2164,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - amazonlinux-2-arm64-pkg-download-tests: - name: Test Amazon Linux 2 Arm64 Package Downloads + amazonlinux-2-arm64-package-download-tests: + name: Test Amazon Linux 2 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2220,10 +2183,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centos-7-pkg-download-tests: - name: Test CentOS 7 Package Downloads + centos-7-package-download-tests: + name: 
Test CentOS 7 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2238,10 +2202,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centos-7-arm64-pkg-download-tests: - name: Test CentOS 7 Arm64 Package Downloads + centos-7-arm64-package-download-tests: + name: Test CentOS 7 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2256,10 +2221,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-8-pkg-download-tests: - name: Test CentOS Stream 8 Package Downloads + centosstream-8-package-download-tests: + name: Test CentOS Stream 8 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2274,10 +2240,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-8-arm64-pkg-download-tests: - name: Test CentOS Stream 8 Arm64 Package Downloads + centosstream-8-arm64-package-download-tests: + name: Test CentOS Stream 8 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2292,10 +2259,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - 
centosstream-9-pkg-download-tests: - name: Test CentOS Stream 9 Package Downloads + centosstream-9-package-download-tests: + name: Test CentOS Stream 9 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2310,10 +2278,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - centosstream-9-arm64-pkg-download-tests: - name: Test CentOS Stream 9 Arm64 Package Downloads + centosstream-9-arm64-package-download-tests: + name: Test CentOS Stream 9 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2328,10 +2297,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - debian-10-pkg-download-tests: - name: Test Debian 10 Package Downloads + debian-10-package-download-tests: + name: Test Debian 10 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2346,10 +2316,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - debian-11-pkg-download-tests: - name: Test Debian 11 Package Downloads + debian-11-package-download-tests: + name: Test Debian 11 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2364,10 +2335,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ 
needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - debian-11-arm64-pkg-download-tests: - name: Test Debian 11 Arm64 Package Downloads + debian-11-arm64-package-download-tests: + name: Test Debian 11 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2382,46 +2354,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-36-pkg-download-tests: - name: Test Fedora 36 Package Downloads - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - publish-repositories - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36 - platform: linux - arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: staging - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - - fedora-36-arm64-pkg-download-tests: - name: Test Fedora 36 Arm64 Package Downloads - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - publish-repositories - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36-arm64 - platform: linux - arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: staging - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release 
}}" - secrets: inherit - - fedora-37-pkg-download-tests: - name: Test Fedora 37 Package Downloads + fedora-37-package-download-tests: + name: Test Fedora 37 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2436,10 +2373,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-37-arm64-pkg-download-tests: - name: Test Fedora 37 Arm64 Package Downloads + fedora-37-arm64-package-download-tests: + name: Test Fedora 37 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2454,10 +2392,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-38-pkg-download-tests: - name: Test Fedora 38 Package Downloads + fedora-38-package-download-tests: + name: Test Fedora 38 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2472,10 +2411,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - fedora-38-arm64-pkg-download-tests: - name: Test Fedora 38 Arm64 Package Downloads + fedora-38-arm64-package-download-tests: + name: Test Fedora 38 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2490,10 +2430,11 @@ jobs: environment: staging skip-code-coverage: true 
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - photonos-3-pkg-download-tests: - name: Test Photon OS 3 Package Downloads + photonos-3-package-download-tests: + name: Test Photon OS 3 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2508,10 +2449,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - photonos-4-pkg-download-tests: - name: Test Photon OS 4 Package Downloads + photonos-4-package-download-tests: + name: Test Photon OS 4 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2526,10 +2468,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2004-pkg-download-tests: - name: Test Ubuntu 20.04 Package Downloads + ubuntu-2004-package-download-tests: + name: Test Ubuntu 20.04 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2544,10 +2487,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2004-arm64-pkg-download-tests: - name: Test Ubuntu 20.04 Arm64 Package Downloads + ubuntu-2004-arm64-package-download-tests: + name: Test Ubuntu 20.04 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow 
@@ -2562,10 +2506,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2204-pkg-download-tests: - name: Test Ubuntu 22.04 Package Downloads + ubuntu-2204-package-download-tests: + name: Test Ubuntu 22.04 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2580,10 +2525,11 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - ubuntu-2204-arm64-pkg-download-tests: - name: Test Ubuntu 22.04 Arm64 Package Downloads + ubuntu-2204-arm64-package-download-tests: + name: Test Ubuntu 22.04 Arm64 package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2598,9 +2544,48 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - macos-12-pkg-download-tests: + ubuntu-2204-onedir-download-tests: + name: Test Ubuntu 22.04 onedir Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: onedir + secrets: inherit + + 
ubuntu-2204-arm64-onedir-download-tests: + name: Test Ubuntu 22.04 Arm64 onedir Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: onedir + secrets: inherit + + macos-12-Package-download-tests: name: Test macOS 12 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: @@ -2616,9 +2601,29 @@ jobs: environment: staging skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: package secrets: inherit - windows-2022-nsis-amd64-pkg-download-tests: + macos-12-Onedir-download-tests: + name: Test macOS 12 Onedir Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-macos.yml + with: + distro-slug: macos-12 + platform: darwin + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: onedir + secrets: inherit + + windows-2022-NSIS-amd64-download-tests: if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} name: Test Windows 2022 amd64 NSIS Package Downloads needs: @@ -2629,7 +2634,7 @@ jobs: distro-slug: windows-2022 platform: windows arch: amd64 - pkg-type: NSIS + pkg-type: nsis cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging @@ -2637,7 +2642,7 @@ jobs: latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit - windows-2022-msi-amd64-pkg-download-tests: + windows-2022-MSI-amd64-download-tests: if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} name: Test Windows 2022 amd64 MSI Package Downloads needs: @@ -2648,7 +2653,26 @@ jobs: distro-slug: windows-2022 platform: windows arch: amd64 - pkg-type: MSI + pkg-type: msi + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + windows-2022-Onedir-amd64-download-tests: + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + name: Test Windows 2022 amd64 Onedir Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + pkg-type: onedir cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging @@ -2676,7 +2700,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 
- fedora-38 - opensuse-15 @@ -2706,36 +2729,38 @@ jobs: - windows-2019-msi-pkg-tests - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests - - almalinux-8-pkg-download-tests - - almalinux-8-arm64-pkg-download-tests - - almalinux-9-pkg-download-tests - - almalinux-9-arm64-pkg-download-tests - - amazonlinux-2-pkg-download-tests - - amazonlinux-2-arm64-pkg-download-tests - - centos-7-pkg-download-tests - - centos-7-arm64-pkg-download-tests - - centosstream-8-pkg-download-tests - - centosstream-8-arm64-pkg-download-tests - - centosstream-9-pkg-download-tests - - centosstream-9-arm64-pkg-download-tests - - debian-10-pkg-download-tests - - debian-11-pkg-download-tests - - debian-11-arm64-pkg-download-tests - - fedora-36-pkg-download-tests - - fedora-36-arm64-pkg-download-tests - - fedora-37-pkg-download-tests - - fedora-37-arm64-pkg-download-tests - - fedora-38-pkg-download-tests - - fedora-38-arm64-pkg-download-tests - - photonos-3-pkg-download-tests - - photonos-4-pkg-download-tests - - ubuntu-2004-pkg-download-tests - - ubuntu-2004-arm64-pkg-download-tests - - ubuntu-2204-pkg-download-tests - - ubuntu-2204-arm64-pkg-download-tests - - macos-12-pkg-download-tests - - windows-2022-nsis-amd64-pkg-download-tests - - windows-2022-msi-amd64-pkg-download-tests + - almalinux-8-package-download-tests + - almalinux-8-arm64-package-download-tests + - almalinux-9-package-download-tests + - almalinux-9-arm64-package-download-tests + - amazonlinux-2-package-download-tests + - amazonlinux-2-arm64-package-download-tests + - centos-7-package-download-tests + - centos-7-arm64-package-download-tests + - centosstream-8-package-download-tests + - centosstream-8-arm64-package-download-tests + - centosstream-9-package-download-tests + - centosstream-9-arm64-package-download-tests + - debian-10-package-download-tests + - debian-11-package-download-tests + - debian-11-arm64-package-download-tests + - fedora-37-package-download-tests + - fedora-37-arm64-package-download-tests + - 
fedora-38-package-download-tests + - fedora-38-arm64-package-download-tests + - photonos-3-package-download-tests + - photonos-4-package-download-tests + - ubuntu-2004-package-download-tests + - ubuntu-2004-arm64-package-download-tests + - ubuntu-2204-package-download-tests + - ubuntu-2204-arm64-package-download-tests + - ubuntu-2204-onedir-download-tests + - ubuntu-2204-arm64-onedir-download-tests + - macos-12-Package-download-tests + - macos-12-Onedir-download-tests + - windows-2022-NSIS-amd64-download-tests + - windows-2022-MSI-amd64-download-tests + - windows-2022-Onedir-amd64-download-tests environment: staging runs-on: - self-hosted @@ -2788,7 +2813,7 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - check-requirements - prepare-workflow diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 7646dd6e3e5..eb0df9b14de 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -16,6 +16,8 @@ uses: ./.github/workflows/build-<{ pkg_type }>-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + relenv-version: "<{ relenv_version }>" + python-version: "<{ python_version }>" <%- if pkg_type in ("macos", "windows") and gh_environment %> environment: <{ gh_environment }> <%- if pkg_type == "macos" %> diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 538c0f91a33..30a82d331eb 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -1,12 +1,6 @@ <%- extends 'layout.yml.jinja' %> <%- set pre_commit_version = "3.0.4" %> -<%- block on %> -on: - push: {} - pull_request: {} -<%- 
endblock on %> - <%- block jobs %> <{- super() }> @@ -58,8 +52,7 @@ on: - x86_64 <%- else %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} <%- endif %> needs: - prepare-workflow @@ -220,7 +213,7 @@ on: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 @@ -275,9 +268,7 @@ on: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "<{ relenv_version }>" - python-version-linux: "<{ python_version_linux }>" - python-version-macos: "<{ python_version_macos }>" - python-version-windows: "<{ python_version_windows }>" + python-version: "<{ python_version }>" <%- endif %> @@ -300,9 +291,7 @@ on: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "<{ relenv_version }>" - python-version-linux: "<{ python_version_linux }>" - python-version-macos: "<{ python_version_macos }>" - python-version-windows: "<{ python_version_windows }>" + python-version: "<{ python_version }>" <%- endif %> diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 4e0fa686e3e..96394731244 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -5,11 +5,10 @@ <%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %> <%- set 
prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %> <%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %> -<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ github.event_name == 'pull_request' }}") %> +<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}") %> <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %> <%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set prepare_actual_release = prepare_actual_release | default(False) %> -<%- set release_branches = ["master", "3006.x"] %> --- <%- block name %> name: <{ workflow_name }> @@ -72,7 +71,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} <%- if prepare_workflow_if_check %> if: <{ prepare_workflow_if_check }> <%- endif %> @@ -92,6 +91,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - uses: actions/checkout@v3 with: @@ -185,6 +185,11 @@ jobs: run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -192,35 +197,13 @@ jobs: salt-version: "<{ prepare_workflow_salt_version_input }>" validate-version: true - - name: Get Pull Number - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-number - uses: ./.github/actions/get-pull-number - with: - owner: ${{ github.repository_owner 
}} - repo: ${{ github.event.repository.name }} - sha: ${{ github.sha }} - pull-number: ${{ github.event.pull_request.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Request - if: ${{ github.event_name == 'pull_request' }} - id: get-pull-request - uses: ./.github/actions/get-pull-request - with: - owner: ${{ github.repository_owner }} - repo: ${{ github.event.repository.name }} - pull-number: ${{ steps.get-pull-number.outputs.number }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Get Pull Labels - if: ${{ github.event_name == 'pull_request' }} + - name: Get Pull Request Test Labels id: get-pull-labels - uses: ./.github/actions/get-pull-labels - with: - pull-request: ${{ steps.get-pull-request.outputs.pull-request }} + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} <%- if prepare_actual_release %> @@ -263,7 +246,9 @@ jobs: - name: Define Jobs id: define-jobs run: | - tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{ prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite }> ${{ github.event_name }} changed-files.json + tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{ + prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite + }> ${{ github.event_name }} changed-files.json - name: Check Defined Jobs run: | @@ -276,10 +261,18 @@ jobs: run: | tools ci get-releases + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + - name: Check Salt Releases run: | echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' 
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' - name: Define Testrun id: define-testrun @@ -301,11 +294,6 @@ jobs: with: name: testrun-changed-files.txt path: testrun-changed-files.txt - - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" <%- endblock prepare_workflow_job %> <%- endif %> @@ -316,7 +304,10 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + <%- if workflow_slug == "nightly" %> + environment: <{ workflow_slug }> + <%- endif %> needs: <%- for need in prepare_workflow_needs.iter(consume=True) %> - <{ need }> diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja index d78faa49c0a..7bd74c31da0 100644 --- a/.github/workflows/templates/nightly.yml.jinja +++ b/.github/workflows/templates/nightly.yml.jinja @@ -52,6 +52,81 @@ concurrency: <%- include "workflow-requirements-check.yml.jinja" %> <%- include "trigger-branch-workflows.yml.jinja" %> + {#- When we start using a slack app, we can update messages, not while using incoming webhooks + <%- if workflow_slug == "nightly" %> + + <%- do conclusion_needs.append('notify-slack') %> + notify-slack: + name: Notify Slack + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + environment: <{ gh_environment }> + needs: + <%- for need in prepare_workflow_needs.iter(consume=False) %> + - <{ need }> + <%- endfor %> + outputs: + update-ts: ${{ steps.slack.outputs.update-ts }} + steps: + - name: Notify Slack + id: slack + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + { + "attachments": [ + { + 
"color": "ffca28", + "fields": [ + { + "title": "Workflow", + "short": true, + "value": "${{ github.workflow }}", + "type": "mrkdwn" + }, + { + "title": "Workflow Run", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>", + "type": "mrkdwn" + }, + { + "title": "Branch", + "short": true, + "value": "${{ github.ref_name }}", + "type": "mrkdwn" + }, + { + "title": "Commit", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>", + "type": "mrkdwn" + }, + { + "title": "Attempt", + "short": true, + "value": "${{ github.run_attempt }}", + "type": "mrkdwn" + }, + { + "title": "Status", + "short": true, + "value": "running", + "type": "mrkdwn" + } + ], + "author_name": "${{ github.event.sender.login }}", + "author_link": "${{ github.event.sender.html_url }}", + "author_icon": "${{ github.event.sender.avatar_url }}" + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + + <%- endif %> + #} + <%- endblock pre_jobs %> <%- block jobs %> @@ -116,3 +191,73 @@ concurrency: tools pkg repo publish <{ gh_environment }> --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/ <%- endblock jobs %> + +<%- block set_pipeline_exit_status_extra_steps %> + + <%- if workflow_slug == "nightly" %> + + - name: Notify Slack + id: slack + if: always() + uses: slackapi/slack-github-action@v1.24.0 + with: + {#- When we start using a slack app, we can update messages, not while using incoming webhooks + update-ts: ${{ needs.notify-slack.outputs.update-ts }} + #} + payload: | + { + "attachments": [ + { + "fallback": "${{ github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ 
github.run_id }}", + "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}", + "fields": [ + { + "title": "Workflow", + "short": true, + "value": "${{ github.workflow }}", + "type": "mrkdwn" + }, + { + "title": "Workflow Run", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>", + "type": "mrkdwn" + }, + { + "title": "Branch", + "short": true, + "value": "${{ github.ref_name }}", + "type": "mrkdwn" + }, + { + "title": "Commit", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>", + "type": "mrkdwn" + }, + { + "title": "Attempt", + "short": true, + "value": "${{ github.run_attempt }}", + "type": "mrkdwn" + }, + { + "title": "Status", + "short": true, + "value": "${{ steps.get-workflow-info.outputs.conclusion }}", + "type": "mrkdwn" + } + ], + "author_name": "${{ github.event.sender.login }}", + "author_link": "${{ github.event.sender.html_url }}", + "author_icon": "${{ github.event.sender.avatar_url }}" + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + + <%- endif %> + +<%- endblock set_pipeline_exit_status_extra_steps %> diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index ad651fcfaae..47f02f80f71 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -52,7 +52,7 @@ permissions: <{ job_name }>: <%- do prepare_workflow_needs.append(job_name) %> name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: <{ gh_environment }>-check steps: - name: Check For Admin Permission @@ -61,17 +61,6 @@ permissions: require: admin username: ${{ github.triggering_actor }} - - name: 
Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: <{ release_branches|join(', ') }>" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - <%- endblock pre_jobs %> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index 548faa7a5e2..fdb1b5933f3 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -62,7 +62,7 @@ concurrency: <{ job_name }>: <%- do prepare_workflow_needs.append(job_name) %> name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: <{ gh_environment }>-check steps: - name: Check For Admin Permission @@ -71,17 +71,6 @@ concurrency: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: <{ release_branches|join(', ') }>" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - <%- endblock pre_jobs %> diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index ac826f6e9fe..709219e0f42 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -1,41 +1,41 @@ <%- set linux_pkg_tests = ( - 
("almalinux-8", "Alma Linux 8", "x86_64"), - ("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64"), - ("almalinux-9", "Alma Linux 9", "x86_64"), - ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64"), - ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), - ("centos-7", "CentOS 7", "x86_64"), - ("centos-7-arm64", "CentOS 7 Arm64", "aarch64"), - ("centosstream-8", "CentOS Stream 8", "x86_64"), - ("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64"), - ("centosstream-9", "CentOS Stream 9", "x86_64"), - ("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64"), - ("debian-10", "Debian 10", "x86_64"), - ("debian-11", "Debian 11", "x86_64"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("fedora-36", "Fedora 36", "x86_64"), - ("fedora-36-arm64", "Fedora 36 Arm64", "aarch64"), - ("fedora-37", "Fedora 37", "x86_64"), - ("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"), - ("fedora-38", "Fedora 38", "x86_64"), - ("fedora-38-arm64", "Fedora 38 Arm64", "aarch64"), - ("photonos-3", "Photon OS 3", "x86_64"), - ("photonos-4", "Photon OS 4", "x86_64"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64") + ("almalinux-8", "Alma Linux 8", "x86_64", "package"), + ("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64", "package"), + ("almalinux-9", "Alma Linux 9", "x86_64", "package"), + ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64", "package"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64", "package"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "package"), + ("centos-7", "CentOS 7", "x86_64", "package"), + ("centos-7-arm64", "CentOS 7 Arm64", "aarch64", "package"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "package"), + ("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64", "package"), + ("centosstream-9", 
"CentOS Stream 9", "x86_64", "package"), + ("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64", "package"), + ("debian-10", "Debian 10", "x86_64", "package"), + ("debian-11", "Debian 11", "x86_64", "package"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "package"), + ("fedora-37", "Fedora 37", "x86_64", "package"), + ("fedora-37-arm64", "Fedora 37 Arm64", "aarch64", "package"), + ("fedora-38", "Fedora 38", "x86_64", "package"), + ("fedora-38-arm64", "Fedora 38 Arm64", "aarch64", "package"), + ("photonos-3", "Photon OS 3", "x86_64", "package"), + ("photonos-4", "Photon OS 4", "x86_64", "package"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "package"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "package"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "package"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "package"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "onedir"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "onedir") ) %> - <%- for slug, display_name, arch in linux_pkg_tests %> - <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %> + <%- for slug, display_name, arch, pkg_type in linux_pkg_tests %> + <%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type) %> <{ job_name }>: <%- do test_repo_needs.append(job_name) %> - name: Test <{ display_name }> Package Downloads + name: Test <{ display_name }> <{ pkg_type }> Downloads <%- if gh_environment == "staging" %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> @@ -52,24 +52,26 @@ distro-slug: <{ slug }> platform: linux arch: <{ arch }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ 
gh_environment }> skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: <{ pkg_type.lower() }> secrets: inherit <%- endfor %> - <%- for slug, display_name, arch in ( - ("macos-12", "macOS 12", "x86_64"), + <%- for slug, display_name, arch, pkg_type in ( + ("macos-12", "macOS 12", "x86_64", "Package"), + ("macos-12", "macOS 12", "x86_64", "Onedir"), ) %> - <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %> + <%- set job_name = "{}-{}-download-tests".format(slug.replace(".", ""), pkg_type) %> <{ job_name }>: <%- do test_repo_needs.append(job_name) %> - name: Test <{ display_name }> Package Downloads + name: Test <{ display_name }> <{ pkg_type }> Downloads <%- if gh_environment == "staging" %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> @@ -86,20 +88,21 @@ distro-slug: <{ slug }> platform: darwin arch: <{ arch }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + pkg-type: <{ pkg_type.lower() }> secrets: inherit - <%- endfor %> + <%- endfor %> <%- for slug, display_name, arch in ( ("windows-2022", "Windows 2022", "amd64"), ) %> - <%- for pkg_type in ("NSIS", "MSI") %> - <%- set job_name = "{}-{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower(), arch.lower()) %> + <%- for pkg_type in ("NSIS", "MSI", "Onedir") %> + <%- set job_name = "{}-{}-{}-download-tests".format(slug.replace(".", ""), pkg_type, arch.lower()) %> <{ job_name }>: <%- do test_repo_needs.append(job_name) %> @@ -120,8 +123,8 @@ distro-slug: <{ slug }> platform: windows arch: 
<{ arch }> - pkg-type: <{ pkg_type }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }> + pkg-type: <{ pkg_type.lower() }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: <{ gh_environment }> skip-code-coverage: true diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 99fc85db4fb..bc0cd8afaf7 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -9,7 +9,6 @@ ("debian-10", "Debian 10", "x86_64", "deb"), ("debian-11", "Debian 11", "x86_64", "deb"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"), - ("fedora-36", "Fedora 36", "x86_64", "rpm"), ("fedora-37", "Fedora 37", "x86_64", "rpm"), ("fedora-38", "Fedora 38", "x86_64", "rpm"), ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"), @@ -51,9 +50,10 @@ arch: <{ arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: <{ pkg_type }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} <%- endfor %> @@ -76,9 +76,10 @@ arch: <{ arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} <%- endfor %> @@ -103,9 
+104,10 @@ arch: <{ arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: <{ pkg_type }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} <%- endfor %> <%- endfor %> diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index 2eb0fb5e50e..d54ab8181ee 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -18,8 +18,7 @@ arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }> - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> @@ -42,8 +41,7 @@ arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }> - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> @@ -59,7 +57,6 @@ ("debian-10", "Debian 10", "x86_64"), ("debian-11", "Debian 11", "x86_64"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("fedora-36", "Fedora 36", "x86_64"), ("fedora-37", "Fedora 37", "x86_64"), ("fedora-38", 
"Fedora 38", "x86_64"), ("opensuse-15", "Opensuse 15", "x86_64"), @@ -85,8 +82,7 @@ arch: <{ arch }> testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }> - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> diff --git a/.github/workflows/templates/trigger-branch-workflows.yml.jinja b/.github/workflows/templates/trigger-branch-workflows.yml.jinja index 30f48ffc048..24d0147b915 100644 --- a/.github/workflows/templates/trigger-branch-workflows.yml.jinja +++ b/.github/workflows/templates/trigger-branch-workflows.yml.jinja @@ -7,8 +7,7 @@ <%- do conclusion_needs.append(job_name) %> name: Trigger Branch Workflows if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements steps: diff --git a/.github/workflows/templates/workflow-requirements-check.yml.jinja b/.github/workflows/templates/workflow-requirements-check.yml.jinja index 419ee3f6f52..a18c13c69f5 100644 --- a/.github/workflows/templates/workflow-requirements-check.yml.jinja +++ b/.github/workflows/templates/workflow-requirements-check.yml.jinja @@ -4,7 +4,7 @@ <{ job_name }>: <%- do prepare_workflow_needs.append(job_name) %> name: Check Workflow Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} steps: diff --git 
a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index b8088c64522..fdede59b807 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -42,11 +42,6 @@ on: type: string description: The onedir package name to use default: salt - pull-labels: - required: false - type: string - description: List of all the pull labels - default: '["test:slow", "test:core"]' skip-code-coverage: required: false type: boolean @@ -113,7 +108,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -226,7 +221,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true @@ -269,36 +264,9 @@ jobs: run: | sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin" - - name: Get Test Flags - id: get-test-flags - shell: bash - env: - PULL_LABELS: ${{ inputs.pull-labels }} - run: | - echo "$PULL_LABELS" - # shellcheck disable=SC2086 - 
no_fast_tests="$(jq -c '. | any(index("test:no-fast"))' <<< $PULL_LABELS)" - # shellcheck disable=SC2086 - slow_tests="$(jq -c '. | any(index("test:slow"))' <<< $PULL_LABELS)" - # shellcheck disable=SC2086 - core_tests="$(jq -c '. | any(index("test:core"))' <<< $PULL_LABELS)" - # shellcheck disable=SC2086 - flaky_jail_tests="$(jq -c '. | any(index("test:flaky-jail"))' <<< $PULL_LABELS)" - echo "$no_fast_tests" - echo "$slow_tests" - echo "$core_tests" - echo "$flaky_jail_tests" - # shellcheck disable=SC2086 - { - echo "no_fast_tests=$no_fast_tests"; - echo "slow_tests=$slow_tests"; - echo "core_tests=$core_tests"; - echo "flaky_jail_tests=$flaky_jail_tests"; - } >> "$GITHUB_OUTPUT" - - name: Run Fast/Changed Tests id: run-fast-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -315,7 +283,7 @@ jobs: - name: Run Slow/Changed Tests id: run-slow-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'false' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -332,7 +300,7 @@ jobs: - name: Run Core/Changed Tests id: run-core-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'false' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -349,7 +317,7 @@ jobs: - name: Run Fast Tests id: run-fast-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'false' }} + if: ${{ 
fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -365,7 +333,7 @@ jobs: - name: Run Slow Tests id: run-slow-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -381,7 +349,7 @@ jobs: - name: Run Core Tests id: run-core-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -397,7 +365,7 @@ jobs: - name: Run Flaky Tests id: run-flaky-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 0982e7446eb..d3de4b9258e 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -37,11 +37,6 @@ on: type: string description: The onedir package name to use default: salt - pull-labels: - required: false - type: string - description: List of all the pull labels - default: '["test:slow", "test:core"]' skip-code-coverage: required: false type: boolean @@ -116,7 +111,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ 
inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -247,7 +242,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true @@ -303,36 +298,9 @@ jobs: --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} - - name: Get Test Flags - id: get-test-flags - shell: bash - env: - PULL_LABELS: ${{ inputs.pull-labels }} - run: | - echo "$PULL_LABELS" - # shellcheck disable=SC2086 - no_fast_tests="$(jq -c '. | any(index("test:no-fast"))' <<< $PULL_LABELS)" - # shellcheck disable=SC2086 - slow_tests="$(jq -c '. | any(index("test:slow"))' <<< $PULL_LABELS)" - # shellcheck disable=SC2086 - core_tests="$(jq -c '. | any(index("test:core"))' <<< $PULL_LABELS)" - # shellcheck disable=SC2086 - flaky_jail_tests="$(jq -c '. 
| any(index("test:flaky-jail"))' <<< $PULL_LABELS)" - echo "$no_fast_tests" - echo "$slow_tests" - echo "$core_tests" - echo "$flaky_jail_tests" - # shellcheck disable=SC2086 - { - echo "no_fast_tests=$no_fast_tests"; - echo "slow_tests=$slow_tests"; - echo "core_tests=$core_tests"; - echo "flaky_jail_tests=$flaky_jail_tests"; - } >> "$GITHUB_OUTPUT" - - name: Run Fast/Changed Tests id: run-fast-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ @@ -341,7 +309,7 @@ jobs: - name: Run Slow/Changed Tests id: run-slow-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'false' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ @@ -350,7 +318,7 @@ jobs: - name: Run Core/Changed Tests id: run-core-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'false' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ @@ -359,7 +327,7 @@ jobs: - name: Run Fast Tests id: run-fast-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && 
steps.get-test-flags.outputs.no_fast_tests == 'false' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ @@ -367,7 +335,7 @@ jobs: - name: Run Slow Tests id: run-slow-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ @@ -375,7 +343,7 @@ jobs: - name: Run Core Tests id: run-core-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ @@ -383,7 +351,7 @@ jobs: - name: Run Flaky Tests id: run-flaky-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }} + if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index 
ee67c4d4020..0481c1c0594 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -31,6 +31,10 @@ on: required: true type: string description: The latest salt release + pkg-type: + required: true + type: string + description: The type of artifact to download package-name: required: false type: string @@ -74,7 +78,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 @@ -180,7 +184,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true @@ -221,10 +225,11 @@ jobs: SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }} run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ -E INSTALL_TYPE -E 
SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs - name: Run Package Download Tests @@ -238,10 +243,11 @@ jobs: SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs - name: Combine Coverage Reports diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index ec985efbcee..e24ffbeed8e 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -31,6 +31,10 @@ on: required: true type: string description: The latest salt release + pkg-type: + required: true + type: string + description: The type of artifact to download python-version: required: false type: string @@ -76,7 +80,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 
'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 @@ -180,7 +184,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true @@ -199,6 +203,7 @@ jobs: SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }} run: | sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs @@ -220,6 +225,7 @@ jobs: SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }} run: | sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index 10d4462e451..29ed67fe827 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -83,7 +83,7 @@ jobs: uses: actions/cache@v3 
with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 @@ -189,7 +189,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true @@ -221,7 +221,6 @@ jobs: - name: Show System Info & Test Plan env: - INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" SALT_REPO_ARCH: ${{ inputs.arch }} LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" @@ -231,15 +230,15 @@ jobs: SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }} run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E 
SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs - name: Run Package Download Tests env: - INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" SALT_REPO_ARCH: ${{ inputs.arch }} LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" @@ -249,10 +248,11 @@ jobs: SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ inputs.pkg-type }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs - name: Combine Coverage Reports diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index b7de16fb5ac..b517c53745c 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -27,6 +27,10 @@ on: required: true type: string description: Seed used to invalidate caches + testing-releases: + required: true + type: string + description: A JSON list of releases to test upgrades against python-version: required: false type: string @@ -79,7 +83,7 @@ jobs: - name: Generate Package Test Matrix id: generate-pkg-matrix run: | - tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} + tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ 
inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }} dependencies: name: Setup Test Dependencies @@ -96,7 +100,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -213,7 +217,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true @@ -230,7 +234,8 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.test-chunk }} + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.test-chunk }} \ + ${{ contains(matrix.test-chunk, 'upgrade') && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests env: @@ -242,7 +247,8 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.test-chunk }} + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.test-chunk }} \ + ${{ contains(matrix.test-chunk, 'upgrade') && format('--prev-version {0}', 
matrix.version) || ''}} - name: Fix file ownership run: | diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 71affc00877..3e31045c6f7 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -27,6 +27,10 @@ on: required: true type: string description: Seed used to invalidate caches + testing-releases: + required: true + type: string + description: A JSON list of releases to test upgrades against package-name: required: false type: string @@ -77,7 +81,7 @@ jobs: - name: Generate Package Test Matrix id: generate-pkg-matrix run: | - tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} + tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }} dependencies: name: Setup Test Dependencies @@ -97,7 +101,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} - name: Download Onedir Tarball as an Artifact if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -227,7 +231,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache 
fail-on-cache-miss: true @@ -260,12 +264,14 @@ jobs: - name: Show System Info & Test Plan run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} + --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + ${{ contains(matrix.test-chunk, 'upgrade') && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install\ - --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + ${{ contains(matrix.test-chunk, 'upgrade') && format('--prev-version {0}', matrix.version) || ''}} - name: Download Test Run Artifacts id: download-artifacts-from-vm diff --git a/cicd/amis.yml b/cicd/amis.yml index 47edcf0184b..8fb4513180f 100644 --- a/cicd/amis.yml +++ b/cicd/amis.yml @@ -1 +1 @@ -centosstream-9-x86_64: ami-044545f7a74d46acc +centosstream-9-x86_64: ami-0bd92f4dca5d74017 diff --git a/cicd/golden-images.json b/cicd/golden-images.json index 75341e64aeb..21c702ca732 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -1,8 +1,8 @@ { "almalinux-8-arm64": { - "ami": "ami-0fc1e14bf9ff422aa", + "ami": "ami-05c1d3dbdeeb94bc6", "ami_description": "CI Image of AlmaLinux 8 arm64", - "ami_name": "salt-project/ci/almalinux/8/arm64/20230418.1731", + "ami_name": "salt-project/ci/almalinux/8/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -10,9 +10,9 @@ "ssh_username": "ec2-user" }, "almalinux-8": { - "ami": "ami-0bae4158c1f126271", + "ami": 
"ami-0ec1cbc531f10105b", "ami_description": "CI Image of AlmaLinux 8 x86_64", - "ami_name": "salt-project/ci/almalinux/8/x86_64/20230418.1732", + "ami_name": "salt-project/ci/almalinux/8/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -20,9 +20,9 @@ "ssh_username": "ec2-user" }, "almalinux-9-arm64": { - "ami": "ami-08f4d0fbf5d53c3ab", + "ami": "ami-036c495af9dfcf852", "ami_description": "CI Image of AlmaLinux 9 arm64", - "ami_name": "salt-project/ci/almalinux/9/arm64/20230418.1732", + "ami_name": "salt-project/ci/almalinux/9/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -30,9 +30,9 @@ "ssh_username": "ec2-user" }, "almalinux-9": { - "ami": "ami-00404c1cc5c5a08bd", + "ami": "ami-0dbc7030666419671", "ami_description": "CI Image of AlmaLinux 9 x86_64", - "ami_name": "salt-project/ci/almalinux/9/x86_64/20230418.1738", + "ami_name": "salt-project/ci/almalinux/9/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -40,9 +40,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2-arm64": { - "ami": "ami-05fbdb644d06c27b6", + "ami": "ami-022232915c2a5f2d0", "ami_description": "CI Image of AmazonLinux 2 arm64", - "ami_name": "salt-project/ci/amazonlinux/2/arm64/20230418.1717", + "ami_name": "salt-project/ci/amazonlinux/2/arm64/20230522.0621", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -50,9 +50,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2": { - "ami": "ami-014171e6c30ec8387", + "ami": "ami-0695f87baa5b5ce15", "ami_description": "CI Image of AmazonLinux 2 x86_64", - "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230418.1718", + "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230522.0620", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -60,9 +60,9 @@ "ssh_username": "ec2-user" }, "archlinux-lts": { 
- "ami": "ami-00cff81ed2e2fb0f4", + "ami": "ami-0f6424847f98afc04", "ami_description": "CI Image of ArchLinux lts x86_64", - "ami_name": "salt-project/ci/archlinux/lts/x86_64/20230418.1717", + "ami_name": "salt-project/ci/archlinux/lts/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "false", "instance_type": "t3a.large", @@ -70,9 +70,9 @@ "ssh_username": "arch" }, "centos-7-arm64": { - "ami": "ami-051cef43c13fcc0c9", + "ami": "ami-0908831c364e33a37", "ami_description": "CI Image of CentOS 7 arm64", - "ami_name": "salt-project/ci/centos/7/arm64/20230418.1743", + "ami_name": "salt-project/ci/centos/7/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -80,9 +80,9 @@ "ssh_username": "centos" }, "centos-7": { - "ami": "ami-0dcc94e1bea829149", + "ami": "ami-0ace33028ada62ddb", "ami_description": "CI Image of CentOS 7 x86_64", - "ami_name": "salt-project/ci/centos/7/x86_64/20230418.1743", + "ami_name": "salt-project/ci/centos/7/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -90,9 +90,9 @@ "ssh_username": "centos" }, "centosstream-8-arm64": { - "ami": "ami-02783136c1080c782", + "ami": "ami-0b30827dc592b2695", "ami_description": "CI Image of CentOSStream 8 arm64", - "ami_name": "salt-project/ci/centosstream/8/arm64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/8/arm64/20230522.0618", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -100,9 +100,9 @@ "ssh_username": "centos" }, "centosstream-8": { - "ami": "ami-055e35dc7180defad", + "ami": "ami-0929882a7e5cfba5f", "ami_description": "CI Image of CentOSStream 8 x86_64", - "ami_name": "salt-project/ci/centosstream/8/x86_64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/8/x86_64/20230522.0618", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -110,9 +110,9 @@ "ssh_username": 
"centos" }, "centosstream-9-arm64": { - "ami": "ami-06fd13f7c7c702fc4", + "ami": "ami-00700fb8821b8b8c7", "ami_description": "CI Image of CentOSStream 9 arm64", - "ami_name": "salt-project/ci/centosstream/9/arm64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/9/arm64/20230522.0619", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -120,9 +120,9 @@ "ssh_username": "ec2-user" }, "centosstream-9": { - "ami": "ami-044545f7a74d46acc", + "ami": "ami-0bd92f4dca5d74017", "ami_description": "CI Image of CentOSStream 9 x86_64", - "ami_name": "salt-project/ci/centosstream/9/x86_64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/9/x86_64/20230522.0619", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -130,9 +130,9 @@ "ssh_username": "ec2-user" }, "debian-10-arm64": { - "ami": "ami-045aedc47e7ddfbf1", + "ami": "ami-0f681fc9d5de0c3df", "ami_description": "CI Image of Debian 10 arm64", - "ami_name": "salt-project/ci/debian/10/arm64/20230418.1739", + "ami_name": "salt-project/ci/debian/10/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -140,9 +140,9 @@ "ssh_username": "admin" }, "debian-10": { - "ami": "ami-0a205a9361210b291", + "ami": "ami-0dcf5610590139238", "ami_description": "CI Image of Debian 10 x86_64", - "ami_name": "salt-project/ci/debian/10/x86_64/20230418.1739", + "ami_name": "salt-project/ci/debian/10/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -150,9 +150,9 @@ "ssh_username": "admin" }, "debian-11-arm64": { - "ami": "ami-0be71acc27a8efa60", + "ami": "ami-062b4bf11a864825c", "ami_description": "CI Image of Debian 11 arm64", - "ami_name": "salt-project/ci/debian/11/arm64/20230418.1739", + "ami_name": "salt-project/ci/debian/11/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ 
-160,39 +160,19 @@ "ssh_username": "admin" }, "debian-11": { - "ami": "ami-0ad354da27b34289b", + "ami": "ami-0f400e5fa6806bbca", "ami_description": "CI Image of Debian 11 x86_64", - "ami_name": "salt-project/ci/debian/11/x86_64/20230418.1742", + "ami_name": "salt-project/ci/debian/11/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "admin" }, - "fedora-36-arm64": { - "ami": "ami-00c0ab2829c887922", - "ami_description": "CI Image of Fedora 36 arm64", - "ami_name": "salt-project/ci/fedora/36/arm64/20230418.1726", - "arch": "arm64", - "cloudwatch-agent-available": "true", - "instance_type": "m6g.large", - "is_windows": "false", - "ssh_username": "fedora" - }, - "fedora-36": { - "ami": "ami-0185a1189bff7c771", - "ami_description": "CI Image of Fedora 36 x86_64", - "ami_name": "salt-project/ci/fedora/36/x86_64/20230418.1726", - "arch": "x86_64", - "cloudwatch-agent-available": "true", - "instance_type": "t3a.large", - "is_windows": "false", - "ssh_username": "fedora" - }, "fedora-37-arm64": { - "ami": "ami-075c52fda843ace1b", + "ami": "ami-0d71d6f2b0869842f", "ami_description": "CI Image of Fedora 37 arm64", - "ami_name": "salt-project/ci/fedora/37/arm64/20230418.1726", + "ami_name": "salt-project/ci/fedora/37/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -200,9 +180,9 @@ "ssh_username": "fedora" }, "fedora-37": { - "ami": "ami-099a68403d6c65733", + "ami": "ami-026f494dd4b9d40e8", "ami_description": "CI Image of Fedora 37 x86_64", - "ami_name": "salt-project/ci/fedora/37/x86_64/20230418.1726", + "ami_name": "salt-project/ci/fedora/37/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -210,9 +190,9 @@ "ssh_username": "fedora" }, "fedora-38-arm64": { - "ami": "ami-02fa22d081a9be052", + "ami": "ami-01ba8a7951daf68fb", "ami_description": "CI Image of 
Fedora 38 arm64", - "ami_name": "salt-project/ci/fedora/38/arm64/20230418.1727", + "ami_name": "salt-project/ci/fedora/38/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -220,9 +200,9 @@ "ssh_username": "fedora" }, "fedora-38": { - "ami": "ami-0a8d949d0bb15bbc0", + "ami": "ami-0699dbe70b69e96aa", "ami_description": "CI Image of Fedora 38 x86_64", - "ami_name": "salt-project/ci/fedora/38/x86_64/20230418.1727", + "ami_name": "salt-project/ci/fedora/38/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -230,9 +210,9 @@ "ssh_username": "fedora" }, "opensuse-15": { - "ami": "ami-089ac311f924f131f", + "ami": "ami-0c594da84f6e1cd96", "ami_description": "CI Image of Opensuse 15 x86_64", - "ami_name": "salt-project/ci/opensuse/15/x86_64/20230418.1744", + "ami_name": "salt-project/ci/opensuse/15/x86_64/20230522.0619", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -240,9 +220,9 @@ "ssh_username": "ec2-user" }, "photonos-3": { - "ami": "ami-03ce6db789f90957b", + "ami": "ami-0db2ebdb9bc3400ef", "ami_description": "CI Image of PhotonOS 3 x86_64", - "ami_name": "salt-project/ci/photonos/3/x86_64/20230418.1717", + "ami_name": "salt-project/ci/photonos/3/x86_64/20230522.0617", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -250,9 +230,9 @@ "ssh_username": "root" }, "photonos-4": { - "ami": "ami-0ef9996c398479d65", + "ami": "ami-08a6b6bbf6779a538", "ami_description": "CI Image of PhotonOS 4 x86_64", - "ami_name": "salt-project/ci/photonos/4/x86_64/20230418.1717", + "ami_name": "salt-project/ci/photonos/4/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -260,9 +240,9 @@ "ssh_username": "root" }, "ubuntu-20.04-arm64": { - "ami": "ami-0c4d21e0772489c0d", + "ami": "ami-0dccc0de7a38cca90", "ami_description": "CI Image of 
Ubuntu 20.04 arm64", - "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230418.1728", + "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -270,9 +250,9 @@ "ssh_username": "ubuntu" }, "ubuntu-20.04": { - "ami": "ami-09ae6200865b29b9b", + "ami": "ami-05e51f893a626b579", "ami_description": "CI Image of Ubuntu 20.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230418.1728", + "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -280,9 +260,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04-arm64": { - "ami": "ami-024fe5d0b838f88f7", + "ami": "ami-0c958272da6c09ca6", "ami_description": "CI Image of Ubuntu 22.04 arm64", - "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230418.1731", + "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -290,9 +270,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04": { - "ami": "ami-0d83f00f084d91451", + "ami": "ami-09e45f31ccafcdcec", "ami_description": "CI Image of Ubuntu 22.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230418.1732", + "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -300,9 +280,9 @@ "ssh_username": "ubuntu" }, "windows-2016": { - "ami": "ami-078d9229cfaf24d1b", + "ami": "ami-099db55543619f54a", "ami_description": "CI Image of Windows 2016 x86_64", - "ami_name": "salt-project/ci/windows/2016/x86_64/20230418.1717", + "ami_name": "salt-project/ci/windows/2016/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -310,9 +290,9 @@ "ssh_username": "Administrator" }, "windows-2019": { - "ami": "ami-0ab20823965e1aa7a", + "ami": 
"ami-0860ee5bc9ee93e13", "ami_description": "CI Image of Windows 2019 x86_64", - "ami_name": "salt-project/ci/windows/2019/x86_64/20230418.1717", + "ami_name": "salt-project/ci/windows/2019/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -320,9 +300,9 @@ "ssh_username": "Administrator" }, "windows-2022": { - "ami": "ami-054c4cf04c0f31eb1", + "ami": "ami-032e3abce2aa98da7", "ami_description": "CI Image of Windows 2022 x86_64", - "ami_name": "salt-project/ci/windows/2022/x86_64/20230418.1717", + "ami_name": "salt-project/ci/windows/2022/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index ec3d939fe03..b554b57aafa 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,4 +1,2 @@ -python_version_linux: "3.10.11" -python_version_macos: "3.10.11" -python_version_windows: "3.10.11" +python_version: "3.10.11" relenv_version: "0.12.3" diff --git a/pkg/debian/rules b/pkg/debian/rules index a73b38b4041..d4fbf3775f7 100755 --- a/pkg/debian/rules +++ b/pkg/debian/rules @@ -15,14 +15,16 @@ override_dh_auto_clean: ifeq ("${SALT_ONEDIR_ARCHIVE}", "") override_dh_auto_build: + export FETCH_RELENV_VERSION=$${SALT_RELENV_VERSION} mkdir -p build/onedir python3 -m venv --clear --copies build/onedir/venv build/onedir/venv/bin/python3 -m pip install relenv==$${SALT_RELENV_VERSION} + export FETCH_RELENV_VERSION=$${SALT_RELENV_VERSION} export PY=$$(build/onedir/venv/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') \ && build/onedir/venv/bin/python3 -m pip install -r requirements/static/ci/py$${PY}/tools.txt - build/onedir/venv/bin/relenv fetch - build/onedir/venv/bin/relenv toolchain fetch - build/onedir/venv/bin/tools pkg build onedir-dependencies --arch $${SALT_PACKAGE_ARCH} 
--python-version $${SALT_PYTHON_VERSION} --package-name build/onedir/salt --platform linux + build/onedir/venv/bin/relenv fetch --arch=$${SALT_PACKAGE_ARCH} --python=$${SALT_PYTHON_VERSION} + build/onedir/venv/bin/relenv toolchain fetch --arch=$${SALT_PACKAGE_ARCH} + build/onedir/venv/bin/tools pkg build onedir-dependencies --arch $${SALT_PACKAGE_ARCH} --relenv-version=$${SALT_RELENV_VERSION} --python-version $${SALT_PYTHON_VERSION} --package-name build/onedir/salt --platform linux # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; diff --git a/pkg/debian/salt-cloud.postinst b/pkg/debian/salt-cloud.postinst index 12a955b9349..a92551161da 100644 --- a/pkg/debian/salt-cloud.postinst +++ b/pkg/debian/salt-cloud.postinst @@ -1,5 +1,6 @@ case "$1" in configure) - chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy + PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush;") + chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy ;; esac diff --git a/pkg/macos/build_python.sh b/pkg/macos/build_python.sh index 23fce00eabb..be179b7b166 100755 --- a/pkg/macos/build_python.sh +++ b/pkg/macos/build_python.sh @@ -21,26 +21,6 @@ # The default version to be built # TODO: The is not selectable via RELENV yet. 
This has to match whatever relenv # TODO: is building -PY_VERSION="3.10.9" - -# Valid versions supported by macOS -PY_VERSIONS=( - "3.10.9" - "3.10.8" - "3.10.7" - "3.9.16" - "3.9.15" - "3.9.14" - "3.9.13" - "3.9.12" - "3.9.11" - "3.8.16" - "3.8.15" - "3.8.14" - "3.8.13" - "3.8.12" - "3.8.11" -) # Locations SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" @@ -65,14 +45,11 @@ _usage() { echo "" echo " -h, --help this message" echo " -b, --build build python instead of fetching" - echo " -v, --version version of python to install" - echo " python version must be one of:" - for i in "${PY_VERSIONS[@]}"; do - echo " - $i" - done + echo " -v, --version version of python to install, must be available with relenv" + echo " -r, --relenv-version version of python to install, must be available with relenv" echo "" - echo " To build python 3.9.15:" - echo " example: $0 --version 3.9.15" + echo " To build python 3.10.11:" + echo " example: $0 --version 3.10.11" } # _msg @@ -109,7 +86,12 @@ while true; do ;; -v | --version ) shift - PY_VERSION="$*" + PY_VERSION="$1" + shift + ;; + -r | --relenv-version ) + shift + RELENV_VERSION="$1" shift ;; -b | --build ) @@ -123,19 +105,13 @@ while true; do exit 1 ;; * ) - PY_VERSION="$*" - shift + echo "Invalid Arguments: $*" + _usage + exit 1 ;; esac done -if ! 
[[ " ${PY_VERSIONS[*]} " =~ " $PY_VERSION " ]]; then - echo "Invalid Python Version: $PY_VERSION" - echo "" - _usage - exit 1 -fi - #------------------------------------------------------------------------------- # Script Start #------------------------------------------------------------------------------- @@ -214,12 +190,17 @@ fi # Installing Relenv #------------------------------------------------------------------------------- _msg "Installing relenv" -pip install relenv >/dev/null 2>&1 -if [ -n "$(pip show relenv)" ]; then +if [ -n "${RELENV_VERSION}" ]; then + pip install relenv==${RELENV_VERSION} +else + pip install relenv +fi +if [ -n "$(relenv --version)" ]; then _success else _failure fi +export RELENV_FETCH_VERSION=$(relenv --version) #------------------------------------------------------------------------------- # Building Python with Relenv @@ -231,8 +212,8 @@ else # We want to suppress the output here so it looks nice # To see the output, remove the output redirection _msg "Fetching python (relenv)" - relenv fetch >/dev/null 2>&1 - if [ -f "$RELENV_DIR/build/x86_64-macos.tar.xz" ]; then + relenv fetch --python=$PY_VERSION + if [ -f "$RELENV_DIR/build/$PY_VERSION-x86_64-macos.tar.xz" ]; then _success else _failure @@ -240,7 +221,7 @@ else fi _msg "Extracting python environment" -relenv create "$BUILD_DIR/opt/salt" +relenv create --python=$PY_VERSION "$BUILD_DIR/opt/salt" if [ -f "$BLD_PY_BIN" ]; then _success else diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 75b186f1cc1..9a6f71cba38 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -135,14 +135,16 @@ mkdir -p $RPM_BUILD_DIR/build cd $RPM_BUILD_DIR %if "%{getenv:SALT_ONEDIR_ARCHIVE}" == "" + export FETCH_RELENV_VERSION=${SALT_RELENV_VERSION} python3 -m venv --clear --copies build/venv build/venv/bin/python3 -m pip install relenv==${SALT_RELENV_VERSION} + export FETCH_RELENV_VERSION=${SALT_RELENV_VERSION} export PY=$(build/venv/bin/python3 -c 'import sys; 
sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') build/venv/bin/python3 -m pip install -r %{_salt_src}/requirements/static/ci/py${PY}/tools.txt - build/venv/bin/relenv fetch - build/venv/bin/relenv toolchain fetch + build/venv/bin/relenv fetch --arch=${SALT_PACKAGE_ARCH} --python=${SALT_PYTHON_VERSION} + build/venv/bin/relenv toolchain fetch --arch=${SALT_PACKAGE_ARCH} cd %{_salt_src} - $RPM_BUILD_DIR/build/venv/bin/tools pkg build onedir-dependencies --arch ${SALT_PACKAGE_ARCH} --python-version ${SALT_PYTHON_VERSION} --package-name $RPM_BUILD_DIR/build/salt --platform linux + $RPM_BUILD_DIR/build/venv/bin/tools pkg build onedir-dependencies --arch ${SALT_PACKAGE_ARCH} --relenv-version=${SALT_RELENV_VERSION} --python-version ${SALT_PYTHON_VERSION} --package-name $RPM_BUILD_DIR/build/salt --platform linux # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in # the /bin directory diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 89f54fe2f72..5eba84bb4aa 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -44,6 +44,21 @@ def grains(sminion): return sminion.opts["grains"].copy() +@pytest.fixture(scope="session", autouse=True) +def _system_up_to_date( + grains, + shell, +): + if grains["os_family"] == "Debian": + ret = shell.run("apt", "update") + assert ret.returncode == 0 + ret = shell.run("apt", "upgrade", "-y") + assert ret.returncode == 0 + elif grains["os_family"] == "Redhat": + ret = shell.run("yum", "update", "-y") + assert ret.returncode == 0 + + def pytest_addoption(parser): """ register argparse-style options and ini-style config values. 
@@ -348,6 +363,8 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): master_script = True # this check will need to be changed to install_salt.relenv # once the package version returns 3006 and not 3005 on master + if install_salt.relenv: + master_script = True elif not install_salt.upgrade: master_script = True @@ -360,6 +377,8 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): python_executable = install_salt.bin_dir / "Scripts" / "python.exe" if install_salt.classic: python_executable = install_salt.bin_dir / "python.exe" + if install_salt.relenv: + python_executable = install_salt.install_dir / "Scripts" / "python.exe" factory = salt_factories.salt_master_daemon( random_string("master-"), defaults=config_defaults, diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index 5bb0e3a96a4..25b2bd89051 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -81,6 +81,11 @@ def root_url(salt_release): return _root_url +@pytest.fixture(scope="module") +def package_type(): + return os.environ.get("DOWNLOAD_TEST_PACKAGE_TYPE") + + def get_salt_release(): salt_release = os.environ.get("SALT_RELEASE") pkg_test_type = os.environ.get("PKG_TEST_TYPE", "install") @@ -130,9 +135,24 @@ def salt_release(): yield get_salt_release() +@pytest.fixture(scope="module") +def onedir_install_path(tmp_path_factory): + install_path = tmp_path_factory.mktemp("onedir_install") + yield install_path + shutil.rmtree(install_path, ignore_errors=True) + + @pytest.fixture(scope="module") def _setup_system( - tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name, repo_subpath + grains, + shell, + root_url, + salt_release, + gpg_key_name, + repo_subpath, + package_type, + tmp_path_factory, + onedir_install_path, ): downloads_path = tmp_path_factory.mktemp("downloads") try: @@ -144,6 +164,8 @@ def _setup_system( salt_release=salt_release, 
downloads_path=downloads_path, repo_subpath=repo_subpath, + package_type=package_type, + onedir_install_path=onedir_install_path, ): yield else: @@ -154,6 +176,8 @@ def _setup_system( salt_release=salt_release, downloads_path=downloads_path, repo_subpath=repo_subpath, + package_type=package_type, + onedir_install_path=onedir_install_path, ) elif grains["os"] == "Amazon": setup_redhat_family( @@ -210,6 +234,8 @@ def _setup_system( downloads_path=downloads_path, gpg_key_name=gpg_key_name, repo_subpath=repo_subpath, + package_type=package_type, + onedir_install_path=onedir_install_path, ) else: pytest.fail("Don't know how to handle %s", grains["osfinger"]) @@ -286,152 +312,237 @@ def setup_debian_family( downloads_path, gpg_key_name, repo_subpath, + package_type, + onedir_install_path, ): arch = os.environ.get("SALT_REPO_ARCH") or "amd64" - if arch == "aarch64": - arch = "arm64" - elif arch == "x86_64": - arch = "amd64" - ret = shell.run("apt-get", "update", "-y", check=False) if ret.returncode != 0: pytest.fail(str(ret)) - if repo_subpath == "minor": - repo_url_base = ( - f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" - ) - else: - repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" - gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" + if package_type == "package": + if arch == "aarch64": + arch = "arm64" + elif arch == "x86_64": + arch = "amd64" - try: - pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) - except Exception as exc: - pytest.fail(f"Failed to download {gpg_file_url}: {exc}") - - salt_sources_path = downloads_path / "salt.list" - salt_sources_path.write_text( - f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n" - ) - commands = [ - ( - "mv", - str(downloads_path / gpg_key_name), - f"/usr/share/keyrings/{gpg_key_name}", - ), - ( - "mv", - str(salt_sources_path), - "/etc/apt/sources.list.d/salt.list", - ), - 
("apt-get", "install", "-y", "ca-certificates"), - ("update-ca-certificates",), - ("apt-get", "update"), - ( - "apt-get", - "install", - "-y", - "salt-master", - "salt-minion", - "salt-ssh", - "salt-syndic", - "salt-cloud", - "salt-api", - ), - ] - for cmd in commands: - ret = shell.run(*cmd) - if ret.returncode != 0: - pytest.fail(str(ret)) - - -def setup_macos(shell, root_url, salt_release, downloads_path, repo_subpath): - arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" - if arch == "aarch64": - arch = "arm64" - - if packaging.version.parse(salt_release) > packaging.version.parse("3005"): - mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg" if repo_subpath == "minor": - mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" + repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" else: - mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}" + repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" + gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" + + try: + pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) + except Exception as exc: + pytest.fail(f"Failed to download {gpg_file_url}: {exc}") + + salt_sources_path = downloads_path / "salt.list" + salt_sources_path.write_text( + f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n" + ) + commands = [ + ( + "mv", + str(downloads_path / gpg_key_name), + f"/usr/share/keyrings/{gpg_key_name}", + ), + ( + "mv", + str(salt_sources_path), + "/etc/apt/sources.list.d/salt.list", + ), + ("apt-get", "install", "-y", "ca-certificates"), + ("update-ca-certificates",), + ("apt-get", "update"), + ( + "apt-get", + "install", + "-y", + "salt-master", + "salt-minion", + "salt-ssh", + "salt-syndic", + "salt-cloud", + "salt-api", + ), + ] + for cmd in commands: + ret = shell.run(*cmd) + if ret.returncode != 0: + pytest.fail(str(ret)) else: - mac_pkg_url = 
f"{root_url}/macos/{salt_release}/{mac_pkg}" - mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" + # We are testing the onedir download + onedir_name = f"salt-{salt_release}-onedir-linux-{arch}.tar.xz" + if repo_subpath == "minor": + repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}" + else: + repo_url_base = f"{root_url}/onedir/{repo_subpath}" + onedir_url = f"{repo_url_base}/{onedir_name}" + onedir_location = downloads_path / onedir_name + onedir_extracted = onedir_install_path - mac_pkg_path = downloads_path / mac_pkg - pytest.helpers.download_file(mac_pkg_url, mac_pkg_path) + try: + pytest.helpers.download_file(onedir_url, onedir_location) + except Exception as exc: + pytest.fail(f"Failed to download {onedir_url}: {exc}") - ret = shell.run( - "installer", - "-pkg", - str(mac_pkg_path), - "-target", - "/", - check=False, - ) - assert ret.returncode == 0, ret + shell.run("tar", "xvf", str(onedir_location), "-C", str(onedir_extracted)) + + +def setup_macos( + shell, + root_url, + salt_release, + downloads_path, + repo_subpath, + package_type, + onedir_install_path, +): + arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" + if package_type == "package": + if arch == "aarch64": + arch = "arm64" + + if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg" + if repo_subpath == "minor": + mac_pkg_url = ( + f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" + ) + else: + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}" + else: + mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}" + mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" + + mac_pkg_path = downloads_path / mac_pkg + pytest.helpers.download_file(mac_pkg_url, mac_pkg_path) + + ret = shell.run( + "installer", + "-pkg", + str(mac_pkg_path), + "-target", + "/", + check=False, + ) + assert ret.returncode == 0, ret + else: + # We are testing the onedir download + onedir_name = 
f"salt-{salt_release}-onedir-darwin-{arch}.tar.xz" + if repo_subpath == "minor": + repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}" + else: + repo_url_base = f"{root_url}/onedir/{repo_subpath}" + onedir_url = f"{repo_url_base}/{onedir_name}" + onedir_location = downloads_path / onedir_name + onedir_extracted = onedir_install_path + + try: + pytest.helpers.download_file(onedir_url, onedir_location) + except Exception as exc: + pytest.fail(f"Failed to download {onedir_url}: {exc}") + + shell.run("tar", "xvf", str(onedir_location), "-C", str(onedir_extracted)) @contextlib.contextmanager -def setup_windows(shell, root_url, salt_release, downloads_path, repo_subpath): +def setup_windows( + shell, + root_url, + salt_release, + downloads_path, + repo_subpath, + package_type, + onedir_install_path, +): try: - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") - arch = os.environ.get("SALT_REPO_ARCH") or "amd64" - install_type = os.environ.get("INSTALL_TYPE") or "msi" - if packaging.version.parse(salt_release) > packaging.version.parse("3005"): - if install_type.lower() == "nsis": - if arch.lower() != "x86": - arch = arch.upper() - win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" + if package_type != "onedir": + root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") + + if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + if package_type.lower() == "nsis": + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" + else: + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi" + if repo_subpath == "minor": + win_pkg_url = ( + f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + ) + else: + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" + ssm_bin = root_dir / "ssm.exe" else: - if arch.lower() != "x86": - arch = arch.upper() - win_pkg = 
f"Salt-Minion-{salt_release}-Py3-{arch}.msi" - if repo_subpath == "minor": - win_pkg_url = ( - f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + win_pkg = f"salt-{salt_release}-windows-{arch}.exe" + win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" + ssm_bin = root_dir / "bin" / "ssm_bin" + + pkg_path = downloads_path / win_pkg + + pytest.helpers.download_file(win_pkg_url, pkg_path) + if package_type.lower() == "nsis": + ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) + else: + ret = shell.run( + "msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""' ) + assert ret.returncode == 0, ret + + log.debug("Removing installed salt-minion service") + ret = shell.run( + "cmd", + "/c", + str(ssm_bin), + "remove", + "salt-minion", + "confirm", + check=False, + ) + assert ret.returncode == 0, ret + else: + # We are testing the onedir download + onedir_name = f"salt-{salt_release}-onedir-windows-{arch}.zip" + if repo_subpath == "minor": + repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}" else: - win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" - ssm_bin = root_dir / "ssm.exe" - else: - win_pkg = f"salt-{salt_release}-windows-{arch}.exe" - win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" - ssm_bin = root_dir / "bin" / "ssm_bin" + repo_url_base = f"{root_url}/onedir/{repo_subpath}" + onedir_url = f"{repo_url_base}/{onedir_name}" + onedir_location = downloads_path / onedir_name + onedir_extracted = onedir_install_path - pkg_path = downloads_path / win_pkg + try: + pytest.helpers.download_file(onedir_url, onedir_location) + except Exception as exc: + pytest.fail(f"Failed to download {onedir_url}: {exc}") - pytest.helpers.download_file(win_pkg_url, pkg_path) - if install_type.lower() == "nsis": - ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) - else: - ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""') - assert ret.returncode == 0, ret - - 
log.debug("Removing installed salt-minion service") - ret = shell.run( - "cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False - ) - assert ret.returncode == 0, ret + shell.run("unzip", str(onedir_location), "-d", str(onedir_extracted)) yield finally: # We need to uninstall the MSI packages, otherwise they will not install correctly - if install_type.lower() == "msi": + if package_type.lower() == "msi": ret = shell.run("msiexec", "/qn", "/x", str(pkg_path)) assert ret.returncode == 0, ret @pytest.fixture(scope="module") -def install_dir(_setup_system): - if platform.is_windows(): - return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt").resolve() - if platform.is_darwin(): - return pathlib.Path("/opt", "salt") - return pathlib.Path("/opt", "saltstack", "salt") +def install_dir(_setup_system, package_type, onedir_install_path): + if package_type != "onedir": + if platform.is_windows(): + return pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt" + ).resolve() + if platform.is_darwin(): + return pathlib.Path("/opt", "salt") + return pathlib.Path("/opt", "saltstack", "salt") + else: + # We are testing the onedir + return onedir_install_path / "salt" @pytest.fixture(scope="module") diff --git a/pkg/tests/integration/test_multi_minion.py b/pkg/tests/integration/test_multi_minion.py new file mode 100644 index 00000000000..13d64f31f6e --- /dev/null +++ b/pkg/tests/integration/test_multi_minion.py @@ -0,0 +1,127 @@ +import os +import pathlib +import subprocess + +import psutil +import pytest + +pytestmark = [ + pytest.mark.skip_unless_on_windows, +] + + +@pytest.fixture +def mm_script(install_salt): + yield install_salt.ssm_bin.parent / "multi-minion.ps1" + + +@pytest.fixture(scope="function") +def mm_conf(mm_script): + yield pathlib.Path(os.getenv("LocalAppData"), "Salt Project", "Salt", "conf") + subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-d"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, 
+ check=False, + universal_newlines=True, + ) + + +def test_script_present(mm_script): + """ + Ensure the multi-minion.ps1 file is present in the root of the installation + """ + assert mm_script.exists() + + +def test_install(mm_script, mm_conf): + """ + Install a second minion with default settings. Should create a minion config + file in Local AppData + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '")], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("master: salt") > -1 + + +def test_install_master(mm_script, mm_conf): + """ + Install a second minion and set the master to spongebob + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-m", "spongebob"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("master: spongebob") > -1 + + +def test_install_prefix(mm_script, mm_conf): + """ + Install a second minion and add a prefix to the minion id + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-p", "squarepants"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("id: squarepants") > -1 + + +def test_install_log_level(mm_script, mm_conf): + """ + Install a second minion and set the log level in the log file to debug + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-l", "debug"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert 
ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("log_level_logfile: debug") > -1 + + +def test_install_start(mm_script, mm_conf): + """ + Install a second minion and start that minion in a hidden process + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-s"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("master: salt") > -1 + + found = False + for p in psutil.process_iter(["cmdline", "name"]): + if p.info["name"] and p.info["name"] == "salt-minion.exe": + if f"{mm_conf}" in p.info["cmdline"]: + found = True + assert found is True diff --git a/pkg/tests/integration/test_pkg.py b/pkg/tests/integration/test_pkg.py index ef220b124b6..5aedefa6ef1 100644 --- a/pkg/tests/integration/test_pkg.py +++ b/pkg/tests/integration/test_pkg.py @@ -3,13 +3,6 @@ import sys import pytest -@pytest.fixture(scope="module") -def grains(salt_call_cli): - ret = salt_call_cli.run("--local", "grains.items") - assert ret.data, ret - return ret.data - - @pytest.fixture(scope="module") def pkg_name(salt_call_cli, grains): if sys.platform.startswith("win"): diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index d1c8d504fa0..74ec8bb0093 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -1,5 +1,6 @@ import pathlib import subprocess +import sys import psutil import pytest @@ -61,7 +62,9 @@ def test_salt_cloud_dirs(install_salt): Test the correct user is running the Salt Master """ paths = [ - "/opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy", + "/opt/saltstack/salt/lib/python{}.{}/site-packages/salt/cloud/deploy".format( + *sys.version_info + ), "/etc/salt/cloud.deploy.d", ] for name 
in paths: diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 57b6ccd4d00..11bfdb6185b 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -596,14 +596,32 @@ class SaltPkgInstall: self.proc.run("launchctl", "disable", f"system/{service_name}") self.proc.run("launchctl", "bootout", "system", str(plist_file)) elif upgrade: + env = os.environ.copy() + extra_args = [] + if self.distro_id in ("ubuntu", "debian"): + env["DEBIAN_FRONTEND"] = "noninteractive" + extra_args = [ + "-o", + "DPkg::Options::=--force-confdef", + "-o", + "DPkg::Options::=--force-confold", + ] log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) - ret = self.proc.run(self.pkg_mngr, "upgrade", "-y", *self.pkgs) + args = extra_args + self.pkgs + ret = self.proc.run( + self.pkg_mngr, + "upgrade", + "-y", + *args, + _timeout=120, + env=env, + ) else: log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) if not platform.is_darwin() and not platform.is_windows(): # Make sure we don't have any trailing references to old package file locations - assert "No such file or directory" not in ret.stdout + ret.returncode == 0 assert "/saltstack/salt/run" not in ret.stdout log.info(ret) self._check_retcode(ret) @@ -643,6 +661,7 @@ class SaltPkgInstall: minor_ver = self.minor pkg_version = self.pkg_version full_version = f"{self.major}.{self.minor}-{pkg_version}" + relenv = int(major_ver) >= 3006 min_ver = f"{major_ver}" distro_name = self.distro_name @@ -658,6 +677,9 @@ class SaltPkgInstall: gpg_key = "SALTSTACK-GPG-KEY.pub" if self.distro_version == "9": gpg_key = "SALTSTACK-GPG-KEY2.pub" + if relenv: + gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.pub" + if platform.is_aarch64(): arch = "aarch64" else: @@ -695,15 +717,20 @@ class SaltPkgInstall: else: arch = "amd64" pathlib.Path("/etc/apt/keyrings").mkdir(parents=True, exist_ok=True) + gpg_dest = "salt-archive-keyring.gpg" + 
gpg_key = gpg_dest + if relenv: + gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.gpg" + download_file( - f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}/salt-archive-keyring.gpg", - "/etc/apt/keyrings/salt-archive-keyring.gpg", + f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}/{gpg_key}", + f"/etc/apt/keyrings/{gpg_dest}", ) with open( pathlib.Path("/etc", "apt", "sources.list.d", "salt.list"), "w" ) as fp: fp.write( - f"deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch={arch}] " + f"deb [signed-by=/etc/apt/keyrings/{gpg_dest} arch={arch}] " f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver} {self.distro_codename} main" ) ret = self.proc.run(self.pkg_mngr, "update") @@ -722,12 +749,21 @@ class SaltPkgInstall: self.bin_dir = self.install_dir / "bin" self.run_root = self.bin_dir / f"salt.exe" self.ssm_bin = self.bin_dir / "ssm.exe" - if self.file_ext == "msi": + if self.file_ext == "msi" or relenv: self.ssm_bin = self.install_dir / "ssm.exe" if not self.classic: - win_pkg = f"salt-{full_version}-windows-amd64.{self.file_ext}" - win_pkg_url = f"https://repo.saltproject.io/salt/py3/windows/{full_version}/{win_pkg}" + if not relenv: + win_pkg = f"salt-{self.prev_version}-windows-amd64.{self.file_ext}" + win_pkg_url = f"https://repo.saltproject.io/salt/py3/windows/{self.prev_version}/{win_pkg}" + else: + if self.file_ext == "msi": + win_pkg = ( + f"Salt-Minion-{self.prev_version}-Py3-AMD64.{self.file_ext}" + ) + elif self.file_ext == "exe": + win_pkg = f"Salt-Minion-{self.prev_version}-Py3-AMD64-Setup.{self.file_ext}" + win_pkg_url = f"https://repo.saltproject.io/salt/py3/windows/{major_ver}/{win_pkg}" else: if self.file_ext == "msi": win_pkg = f"Salt-Minion-{min_ver}-1-Py3-AMD64.{self.file_ext}" @@ -736,10 +772,8 @@ class SaltPkgInstall: win_pkg_url = f"https://repo.saltproject.io/windows/{win_pkg}" pkg_path = 
pathlib.Path(r"C:\TEMP", win_pkg) pkg_path.parent.mkdir(exist_ok=True) - ret = requests.get(win_pkg_url) + download_file(win_pkg_url, pkg_path) - with open(pkg_path, "wb") as fp: - fp.write(ret.content) if self.file_ext == "msi": # Write a batch file to run the installer. It is impossible to # perform escaping of the START_MINION property that the MSI @@ -764,11 +798,16 @@ class SaltPkgInstall: elif platform.is_darwin(): if self.classic: - mac_pkg = f"salt-{min_ver}.{minor_ver}-1-py3-x86_64.pkg" + mac_pkg = f"salt-{self.prev_version}-py3-x86_64.pkg" mac_pkg_url = f"https://repo.saltproject.io/osx/{mac_pkg}" else: - mac_pkg = f"salt-{min_ver}.{minor_ver}-1-macos-x86_64.pkg" - mac_pkg_url = f"https://repo.saltproject.io/salt/py3/macos/{major_ver}.{minor_ver}-1/{mac_pkg}" + if not relenv: + mac_pkg = f"salt-{self.prev_version}-macos-x86_64.pkg" + mac_pkg_url = f"https://repo.saltproject.io/salt/py3/macos/{self.prev_version}/{mac_pkg}" + else: + mac_pkg = f"salt-{self.prev_version}-py3-x86_64.pkg" + mac_pkg_url = f"https://repo.saltproject.io/salt/py3/macos/{major_ver}/{mac_pkg}" + mac_pkg_path = f"/tmp/{mac_pkg}" if not os.path.exists(mac_pkg_path): download_file( diff --git a/pkg/tests/upgrade/test_salt_upgrade.py b/pkg/tests/upgrade/test_salt_upgrade.py index 3aa50b0ecd7..0514eb619c0 100644 --- a/pkg/tests/upgrade/test_salt_upgrade.py +++ b/pkg/tests/upgrade/test_salt_upgrade.py @@ -19,6 +19,7 @@ def test_salt_upgrade(salt_call_cli, salt_minion, install_salt): assert install.returncode == 0 use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) assert "Authentication information could" in use_lib.stderr + # upgrade Salt from previous version and test install_salt.install(upgrade=True) ret = salt_call_cli.run("test.ping") diff --git a/pkg/windows/build.ps1 b/pkg/windows/build.ps1 index 0d87d604007..6c5cac2ac17 100644 --- a/pkg/windows/build.ps1 +++ b/pkg/windows/build.ps1 @@ -39,16 +39,15 @@ param( [Parameter(Mandatory=$false)] 
[ValidatePattern("^\d{1,2}.\d{1,2}.\d{1,2}$")] [ValidateSet( - "3.11.2", - "3.10.10" + "3.11.3", + "3.10.11" )] [Alias("p")] - # The version of Python to be built. Pythonnet only supports up to Python - # 3.8 for now. Pycurl stopped building wheel files after 7.43.0.5 which - # supported up to 3.8. So we're pinned to the latest version of Python 3.8. - # We may have to drop support for pycurl. - # Default is: 3.8.16 - [String] $PythonVersion = "3.10.10", + [String] $PythonVersion = "3.10.11", + + [Parameter(Mandatory=$false)] + [Alias("r")] + [String] $RelenvVersion = "0.12.3", [Parameter(Mandatory=$false)] [Alias("b")] @@ -111,6 +110,7 @@ Write-Host $("#" * 80) Write-Host "Build Salt Installer Packages" -ForegroundColor Cyan Write-Host "- Salt Version: $Version" Write-Host "- Python Version: $PythonVersion" +Write-Host "- Relenv Version: $RelenvVersion" Write-Host "- Architecture: $Architecture" Write-Host $("v" * 80) @@ -165,9 +165,10 @@ if ( ! $SkipInstall ) { $KeywordArguments = @{ Version = $PythonVersion Architecture = $Architecture + RelenvVersion = $RelenvVersion } if ( $Build ) { - $KeywordArguments["Build"] = $true + $KeywordArguments["Build"] = $false } if ( $CICD ) { $KeywordArguments["CICD"] = $true diff --git a/pkg/windows/build_python.ps1 b/pkg/windows/build_python.ps1 index 35cdf1fb0e3..e91f41fea00 100644 --- a/pkg/windows/build_python.ps1 +++ b/pkg/windows/build_python.ps1 @@ -18,16 +18,15 @@ param( [Parameter(Mandatory=$false)] [ValidatePattern("^\d{1,2}.\d{1,2}.\d{1,2}$")] [ValidateSet( - "3.11.2", - "3.10.10" + "3.11.3", + "3.10.11" )] [Alias("v")] - # The version of Python to be built. Pythonnet only supports up to Python - # 3.8 for now. Pycurl stopped building wheel files after 7.43.0.5 which - # supported up to 3.8. So we're pinned to the latest version of Python 3.8. - # We may have to drop support for pycurl or build it ourselves. 
- # Default is: 3.8.16 - [String] $Version = "3.10.10", + [String] $Version = "3.10.11", + + [Parameter(Mandatory=$false)] + [Alias("r")] + [String] $RelenvVersion = "0.12.3", [Parameter(Mandatory=$false)] [ValidateSet("x64", "x86", "amd64")] @@ -85,6 +84,7 @@ if ( $Build ) { } Write-Host "$SCRIPT_MSG" -ForegroundColor Cyan Write-Host "- Python Version: $Version" +Write-Host "- Relenv Version: $RelenvVersion" Write-Host "- Architecture: $Architecture" Write-Host "- Build: $Build" Write-Host $("-" * 80) @@ -227,7 +227,7 @@ if ( $env:VIRTUAL_ENV ) { # Installing Relenv #------------------------------------------------------------------------------- Write-Host "Installing Relenv: " -NoNewLine -pip install relenv --disable-pip-version-check | Out-Null +pip install relenv==$RelenvVersion --disable-pip-version-check | Out-Null $output = pip list --disable-pip-version-check if ("relenv" -in $output.split()) { Write-Result "Success" -ForegroundColor Green @@ -235,6 +235,7 @@ if ("relenv" -in $output.split()) { Write-Result "Failed" -ForegroundColor Red exit 1 } +$env:RELENV_FETCH_VERSION=$RelenvVersion #------------------------------------------------------------------------------- # Building Python with Relenv diff --git a/pkg/windows/clean.ps1 b/pkg/windows/clean.ps1 index 7d2234ad6b8..466cf812dcc 100644 --- a/pkg/windows/clean.ps1 +++ b/pkg/windows/clean.ps1 @@ -140,6 +140,33 @@ if ( Test-Path -Path "$RELENV_DIR" ) { } } +#------------------------------------------------------------------------------- +# Remove MSI build files +#------------------------------------------------------------------------------- +$files = @( + "msi/CustomAction01/CustomAction01.CA.dll", + "msi/CustomAction01/CustomAction01.dll", + "msi/CustomAction01/CustomAction01.pdb", + "msi/Product-discovered-files-config.wixobj", + "msi/Product-discovered-files-config.wxs", + "msi/Product-discovered-files-x64.wixobj", + "msi/Product-discovered-files-x64.wxs", + "msi/Product.wixobj" +) +$files | 
ForEach-Object { + if ( Test-Path -Path "$SCRIPT_DIR\$_" ) { + # Use .net, the powershell function is asynchronous + Write-Host "Removing $_`: " -NoNewline + [System.IO.File]::Delete("$SCRIPT_DIR\$_") + if ( ! (Test-Path -Path "$SCRIPT_DIR\$_") ) { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } + } +} + #------------------------------------------------------------------------------- # Script Completed #------------------------------------------------------------------------------- diff --git a/pkg/windows/multi-minion.cmd b/pkg/windows/multi-minion.cmd new file mode 100644 index 00000000000..3142158b469 --- /dev/null +++ b/pkg/windows/multi-minion.cmd @@ -0,0 +1,5 @@ +:: This is a helper script for multi-minion.ps1. +:: See multi-minion.ps1 for documentation +@ echo off +Set "CurDir=%~dp0" +PowerShell -ExecutionPolicy RemoteSigned -File "%CurDir%\multi-minion.ps1" %* diff --git a/pkg/windows/multi-minion.ps1 b/pkg/windows/multi-minion.ps1 new file mode 100644 index 00000000000..8ad709c04cc --- /dev/null +++ b/pkg/windows/multi-minion.ps1 @@ -0,0 +1,363 @@ +<# +.SYNOPSIS +Script for setting up an additional salt-minion on a machine with Salt installed + +.DESCRIPTION +This script configures an additional minion on a machine that already has a Salt +installation using one of the Salt packages. It sets up the directory structure +required by Salt. It also lays down a minion config to be used +by the Salt minion. Additionaly, this script can start the new minion in a +hidden window. + +You can also remove the multiminion setup with this script. + +This script does not need to be run with Administrator privileges + +If a minion that was configured with this script is already running, the script +will exit. + +The following example sets up a minion for the current logged in account. 
It +configures the minion to connect to the master at 192.168.0.10 + +.EXAMPLE +PS>multi-minion.ps1 -Master 192.168.0.10 +PS>multi-minion.ps1 -m 192.168.0.10 + +The following example sets up a minion for the current logged in account. It +configures the minion to connect to the master at 192.168.0.10. It also prefixes +the minion id with `spongebob` + +.EXAMPLE +PS>multi-minion.ps1 -Master 192.168.0.10 -Prefix spongebob +PS>multi-minion.ps1 -m 192.168.0.10 -p spongebob + +The following example sets up a minion for the current logged in account. It +configures the minion to connect to the master at 192.168.0.10. It also starts +the minion in a hidden window: + +.EXAMPLE +PS>multi-minion.ps1 -Master 192.168.0.10 -Start +PS>multi-minion.ps1 -m 192.168.0.10 -s + +The following example removes a multiminion for the current running account: + +.EXAMPLE +PS>multi-minion.ps1 -Delete +PS>multi-minion.ps1 -d + +#> + +[CmdletBinding()] +param( + + [Parameter(Mandatory=$false)] + [Alias("m")] + # The master to connect to. This can be an ip address or an fqdn. Default + # is salt + [String] $Master = "salt", + + [Parameter(Mandatory=$false)] + [Alias("p")] + # The prefix to the minion id to differentiate it from the installed system + # minion. The default is $env:COMPUTERNAME. It might be helpful to use the + # minion id of the system minion if you know it + [String] $Prefix = "$env:COMPUTERNAME", + + [Parameter(Mandatory=$false)] + [Alias("s")] + # Start the minion in the background + [Switch] $Start, + + [Parameter(Mandatory=$false)] + [Alias("l")] + [ValidateSet( + "all", + "garbage", + "trace", + "debug", + "profile", + "info", + "warning", + "error", + "critical", + "quiet" + )] + # Set the log level for log file. Default is `warning` + [String] $LogLevel = "warning", + + [Parameter(Mandatory=$false)] + [Alias("d")] + # Remove the multi-minion in the current account. 
All other parameters are + # ignored + [Switch] $Remove +) + +########################### Script Variables ############################# +$user_name = [System.Security.Principal.WindowsIdentity]::GetCurrent().Name.Split("\")[-1].ToLower() +$salt_bin = "$env:ProgramFiles\Salt Project\Salt\salt-minion.exe" +$root_dir = "$env:LocalAppData\Salt Project\Salt" +$cache_dir = "$root_dir\var\cache\salt\minion" +$minion_id = "$Prefix-$user_name" + +########################### Script Functions ############################# +function Test-FileLock { + param ( + [parameter(Mandatory=$true)] + # The path to the file to check + [string]$Path + ) + if ((Test-Path -Path $Path) -eq $false) { + return $false + } + $oFile = New-Object System.IO.FileInfo $Path + try { + $oStream = $oFile.Open([System.IO.FileMode]::Open, [System.IO.FileAccess]::ReadWrite, [System.IO.FileShare]::None) + if ($oStream) { + $oStream.Close() + } + return $false + } catch { + # file is locked by a process. + return $true + } +} + +################################ Remove ################################## +if ( $Remove ) { + Write-Host "######################################################################" -ForegroundColor Cyan + Write-Host "Removing multi-minion" + Write-Host "Root Dir: $root_dir" + Write-Host "######################################################################" -ForegroundColor Cyan + + # Stop salt-minion service if running + $processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle + $processes | ForEach-Object { + if ( $_.commandline -like "*$root_dir*" ) { + Write-Host "Killing process: " -NoNewline + $process = Get-Process -Id $_.handle + $process.Kill() + if ( $process.HasExited ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } + } + } + + # Check for locked log file + # The log file will be locked until the running process releases it + while (Test-FileLock -Path 
"$root_dir\var\log\salt\minion") { + Start-Sleep -Seconds 1 + } + + # Remove Directory + if ( Test-Path -Path $root_dir) { + Write-Host "Removing Root Dir: " -NoNewline + Remove-Item -Path $root_dir -Force -Recurse + + if ( !(Test-Path -Path $root_dir) ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } + } + # Remind to delete keys from master + Write-Host "######################################################################" -ForegroundColor Cyan + Write-Host "Multi-Minion successfully removed" + Write-Host ">>>>> Don't forget to remove keys from the master <<<<<" + Write-Host "######################################################################" -ForegroundColor Cyan + exit 0 +} + +################################ EXISTING CHECK ################################ + +# See there is already a running minion +$running = $false +$processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle +$processes | ForEach-Object { + if ( $_.commandline -like "*$root_dir*" ) { + $running = $true + } +} +if ( $running ) { + Write-Host "######################################################################" -ForegroundColor Cyan + Write-Host "Multi-Minion" + Write-Host "A minion is already running for this user" + Write-Host "######################################################################" -ForegroundColor Cyan + exit 0 +} + +################################### INSTALL #################################### + +Write-Host "######################################################################" -ForegroundColor Cyan +Write-Host "Installing Multi-Minion" +Write-Host "Master: $Master" +Write-Host "Minion ID: $minion_id" +Write-Host "Root Directory: $root_dir" +Write-Host "######################################################################" -ForegroundColor Cyan + +# Create Root Directory Structure +if ( !( Test-Path -path "$root_dir" ) ) { + Write-Host 
"Creating Root Dir: " -NoNewline + New-Item -Path "$root_dir" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Config dir +if ( !( Test-Path -path "$root_dir\conf" ) ) { + Write-Host "Creating config dir: " -NoNewline + New-Item -Path "$root_dir\conf" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\conf" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Minion.d dir +if ( !( Test-Path -path "$root_dir\conf\minion.d" ) ) { + Write-Host "Creating minion.d dir: " -NoNewline + New-Item -Path "$root_dir\conf\minion.d" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\conf\minion.d" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# PKI dir +if ( !( Test-Path -path "$root_dir\conf\pki" ) ) { + Write-Host "Creating pki dir: " -NoNewline + New-Item -Path "$root_dir\conf\pki" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\conf\pki" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Log dir +if ( !( Test-Path -path "$root_dir\var\log\salt" ) ) { + Write-Host "Creating log dir: " -NoNewline + New-Item -Path "$root_dir\var\log\salt" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\var\log\salt" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Run dir +if ( !( Test-Path -path "$root_dir\var\run" ) ) { + Write-Host "Creating run dir: " -NoNewline + New-Item -Path "$root_dir\var\run" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\var\run" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Extmods grains dir +if ( !( 
Test-Path -path "$cache_dir\extmods\grains" ) ) { + Write-Host "Creating extmods grains dir: " -NoNewline + New-Item -Path "$cache_dir\extmods\grains" -Type Directory | Out-Null + if ( Test-Path -path "$cache_dir\extmods\grains" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Proc dir +if ( !( Test-Path -path "$cache_dir\proc" ) ) { + Write-Host "Creating proc dir: " -NoNewline + New-Item -Path "$cache_dir\proc" -Type Directory | Out-Null + if ( Test-Path -path "$cache_dir\proc" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Write minion config +Write-Host "Writing minion config: " -NoNewline +Set-Content -Force -Path "$root_dir\conf\minion" -Value "master: $Master" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "id: $minion_id" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "root_dir: $root_dir" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "log_file: $root_dir\var\log\salt\minion" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "log_level_logfile: $LogLevel" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "utils_dirs:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\var\cache\salt\minion\extmods\utils" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "winrepo_dir: $root_dir\srv\salt\win\repo" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "winrepo_dir_ng: $root_dir\srv\salt\win\repo-ng" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "file_roots:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " base:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\salt" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\spm\salt" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "pillar_roots:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " 
base:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\pillar" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\spm\pillar" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "thorium_roots:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " base:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\thorium" + +if ( Test-Path -path "$root_dir\conf\minion" ) { + Write-Host "Success" -ForegroundColor Green +} else { + Write-Host "Failed" -ForegroundColor Red + exit 1 +} + +# Start the minion +if ( $Start ) { + Write-Host "Starting minion process: " -NoNewline + Start-Process -FilePath "`"$salt_bin`"" ` + -ArgumentList "-c","`"$root_dir\conf`"" ` + -WindowStyle Hidden + # Verify running minion + $running = $false + $processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle + $processes | ForEach-Object { + if ( $_.commandline -like "*$root_dir*" ) { + $running = $true + } + } + if ( $running ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +Write-Host "######################################################################" -ForegroundColor Cyan +Write-Host "Multi-Minion installed successfully" +if ( ! $Start ) { + Write-Host "" + Write-Host "To start the minion, run the following command:" + Write-Host "salt-minion -c `"$root_dir\conf`"" + Write-Host "" + Write-Host "To start the minion in the background, run the following command:" + Write-Host "Start-Process -FilePath salt-minion.exe -ArgumentList `"-c`",'`"$root_dir\conf`"' -WindowStyle Hidden" +} +Write-Host "######################################################################" -ForegroundColor Cyan diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index a3ee01a36d3..21ce25daaa6 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -165,6 +165,25 @@ if ( ! 
(Test-Path -Path "$BUILD_DIR\ssm.exe") ) { } } +# Copy the multiminion scripts to the Build directory +$scripts = @( + "multi-minion.cmd", + "multi-minion.ps1" +) +$scripts | ForEach-Object { + if (!(Test-Path -Path "$BUILD_DIR\$_")) { + Write-Host "Copying $_ to the Build directory: " -NoNewline + Copy-Item -Path "$SCRIPT_DIR\$_" -Destination "$BUILD_DIR\$_" + if (Test-Path -Path "$BUILD_DIR\$_") { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } + } +} + +# Copy VCRedist 2013 to the prereqs directory New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null Write-Host "Copying VCRedist 2013 $ARCH_X to prereqs: " -NoNewline $file = "vcredist_$ARCH_X`_2013.exe" @@ -176,6 +195,7 @@ if ( Test-Path -Path "$PREREQ_DIR\$file" ) { exit 1 } +# Copy Universal C Runtimes to the prereqs directory Write-Host "Copying Universal C Runtimes $ARCH_X to prereqs: " -NoNewline $file = "ucrt_$ARCH_X.zip" Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file" diff --git a/tools/__init__.py b/tools/__init__.py index 419ec309c2f..02e6b8de903 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -8,6 +8,8 @@ ptscripts.register_tools_module("tools.docs") ptscripts.register_tools_module("tools.pkg") ptscripts.register_tools_module("tools.pkg.repo") ptscripts.register_tools_module("tools.pkg.build") +ptscripts.register_tools_module("tools.pkg.repo.create") +ptscripts.register_tools_module("tools.pkg.repo.publish") ptscripts.register_tools_module("tools.pre_commit") ptscripts.register_tools_module("tools.release") ptscripts.register_tools_module("tools.vm") diff --git a/tools/ci.py b/tools/ci.py index ba7a7c2f849..08264bb4e50 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -8,13 +8,20 @@ import json import logging import os import pathlib +import random +import sys import time -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from ptscripts import Context, command_group import 
tools.utils +if sys.version_info < (3, 11): + from typing_extensions import NotRequired, TypedDict +else: + from typing import NotRequired, TypedDict # pylint: disable=no-name-in-module + log = logging.getLogger(__name__) # Define the command group @@ -299,6 +306,23 @@ def define_jobs( ) return + # This is a pull-request + + labels: list[str] = [] + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + labels.extend( + label[0] for label in _get_pr_test_labels_from_event_payload(gh_event) + ) + if not changed_files.exists(): ctx.error(f"The '{changed_files}' file does not exist.") ctx.error( @@ -348,9 +372,16 @@ def define_jobs( changed_files_contents["workflows"], } if jobs["test-pkg"] and required_pkg_test_changes == {"false"}: - with open(github_step_summary, "a", encoding="utf-8") as wfh: - wfh.write("De-selecting the 'test-pkg' job.\n") - jobs["test-pkg"] = False + if "test:pkg" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + "The 'test-pkg' job is forcefully selected by the use of the 'test:pkg' label.\n" + ) + jobs["test-pkg"] = True + else: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("De-selecting the 'test-pkg' job.\n") + jobs["test-pkg"] = False if jobs["test-pkg-download"] and required_pkg_test_changes == {"false"}: with open(github_step_summary, "a", encoding="utf-8") as wfh: @@ -381,6 +412,13 @@ def define_jobs( wfh.write(f"jobs={json.dumps(jobs)}\n") +class TestRun(TypedDict): + type: str + skip_code_coverage: bool + from_filenames: NotRequired[str] + selected_tests: NotRequired[dict[str, bool]] + + @ci.command( name="define-testrun", arguments={ @@ -415,10 +453,31 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): if 
TYPE_CHECKING: assert github_step_summary is not None + labels: list[str] = [] + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + labels.extend( + label[0] for label in _get_pr_test_labels_from_event_payload(gh_event) + ) + + skip_code_coverage = True + if "test:coverage" in labels: + skip_code_coverage = False + elif event_name != "pull_request": + skip_code_coverage = False + if event_name != "pull_request": # In this case, a full test run is in order ctx.info("Writing 'testrun' to the github outputs file") - testrun = {"type": "full"} + testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"testrun={json.dumps(testrun)}\n") @@ -440,7 +499,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): ctx.exit(1) # So, it's a pull request... - # Based on which files changed, or other things like PR comments we can + # Based on which files changed, or other things like PR labels we can # decide what to run, or even if the full test run should be running on the # pull request, etc... 
changed_pkg_requirements_files = json.loads( @@ -455,7 +514,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): "Full test run chosen because there was a change made " "to `cicd/golden-images.json`.\n" ) - testrun = {"type": "full"} + testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage) elif changed_pkg_requirements_files or changed_test_requirements_files: with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write( @@ -470,15 +529,20 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): ): wfh.write(f"{path}\n") wfh.write("\n\n") - testrun = {"type": "full"} + testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage) + elif "test:full" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Full test run chosen because the label `test:full` is set.\n") + testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage) else: testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt" - testrun = { - "type": "changed", - "from-filenames": str( + testrun = TestRun( + type="changed", + skip_code_coverage=skip_code_coverage, + from_filenames=str( testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT) ), - } + ) ctx.info(f"Writing {testrun_changed_files_path.name} ...") selected_changed_files = [] for fpath in json.loads(changed_files_contents["testrun_files"]): @@ -498,6 +562,28 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): if testrun["type"] == "changed": with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write("Partial test run chosen.\n") + testrun["selected_tests"] = { + "core": False, + "slow": False, + "fast": True, + "flaky": False, + } + if "test:slow" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Slow tests chosen by `test:slow` label.\n") + testrun["selected_tests"]["slow"] = True + if "test:core" in 
labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Core tests chosen by `test:core` label.\n") + testrun["selected_tests"]["core"] = True + if "test:no-fast" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Fast tests deselected by `test:no-fast` label.\n") + testrun["selected_tests"]["fast"] = False + if "test:flaky-jail" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Flaky jailed tests chosen by `test:flaky-jail` label.\n") + testrun["selected_tests"]["flaky"] = True if selected_changed_files: with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write( @@ -585,18 +671,29 @@ def transport_matrix(ctx: Context, distro_slug: str): "help": "The distribution slug to generate the matrix for", }, "pkg_type": { - "help": "The distribution slug to generate the matrix for", + "help": "The type of package we are testing against", + }, + "testing_releases": { + "help": "The salt releases to test upgrades against", + "nargs": "+", + "required": True, }, }, ) -def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): +def pkg_matrix( + ctx: Context, + distro_slug: str, + pkg_type: str, + testing_releases: list[tools.utils.Version] = None, +): """ Generate the test matrix. """ github_output = os.environ.get("GITHUB_OUTPUT") if github_output is None: ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") - + if TYPE_CHECKING: + assert testing_releases matrix = [] sessions = [ "install", @@ -613,11 +710,12 @@ def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): and pkg_type != "MSI" ): # These OS's never had arm64 packages built for them - # with the tiamate onedir packages. + # with the tiamat onedir packages. 
# we will need to ensure when we release 3006.0 # we allow for 3006.0 jobs to run, because then # we will have arm64 onedir packages to upgrade from sessions.append("upgrade") + # TODO: Remove this block when we reach version 3009.0, we will no longer be testing upgrades from classic packages if ( distro_slug not in [ @@ -633,11 +731,22 @@ def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): sessions.append("upgrade-classic") for session in sessions: - matrix.append( - { - "test-chunk": session, - } - ) + versions: list[str | None] = [None] + if session == "upgrade": + versions = [str(version) for version in testing_releases] + elif session == "upgrade-classic": + versions = [ + str(version) + for version in testing_releases + if version < tools.utils.Version("3006.0") + ] + for version in versions: + matrix.append( + { + "test-chunk": session, + "version": version, + } + ) ctx.info("Generated matrix:") ctx.print(matrix, soft_wrap=True) @@ -672,3 +781,245 @@ def get_releases(ctx: Context, repository: str = "saltstack/salt"): wfh.write(f"latest-release={latest}\n") wfh.write(f"releases={json.dumps(str_releases)}\n") ctx.exit(0) + + +@ci.command( + name="get-pr-test-labels", + arguments={ + "pr": { + "help": "Pull request number", + }, + "repository": { + "help": "Github repository.", + }, + }, +) +def get_pr_test_labels( + ctx: Context, repository: str = "saltstack/salt", pr: int = None +): + """ + Set the pull-request labels. 
+ """ + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is None: + labels = _get_pr_test_labels_from_api(ctx, repository, pr=pr) + else: + if TYPE_CHECKING: + assert gh_event_path is not None + + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + if "pull_request" not in gh_event: + ctx.warning("The 'pull_request' key was not found on the event payload.") + ctx.exit(1) + + pr = gh_event["pull_request"]["number"] + labels = _get_pr_test_labels_from_event_payload(gh_event) + + if labels: + ctx.info(f"Test labels for pull-request #{pr} on {repository}:") + for name, description in labels: + ctx.info(f" * [yellow]{name}[/yellow]: {description}") + else: + ctx.info(f"No test labels for pull-request #{pr} on {repository}") + + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.exit(0) + + if TYPE_CHECKING: + assert github_output is not None + + ctx.info("Writing 'labels' to the github outputs file") + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"labels={json.dumps([label[0] for label in labels])}\n") + ctx.exit(0) + + +def _get_pr_test_labels_from_api( + ctx: Context, repository: str = "saltstack/salt", pr: int = None +) -> list[tuple[str, str]]: + """ + Set the pull-request labels. + """ + if pr is None: + ctx.error( + "Could not find the 'GITHUB_EVENT_PATH' variable and the " + "--pr flag was not passed. Unable to detect pull-request number." 
+ ) + ctx.exit(1) + with ctx.web as web: + headers = { + "Accept": "application/vnd.github+json", + } + if "GITHUB_TOKEN" in os.environ: + headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}" + web.headers.update(headers) + ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}") + if ret.status_code != 200: + ctx.error( + f"Failed to get the #{pr} pull-request details on repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + pr_details = ret.json() + return _filter_test_labels(pr_details["labels"]) + + +def _get_pr_test_labels_from_event_payload( + gh_event: dict[str, Any] +) -> list[tuple[str, str]]: + """ + Get the pull-request test labels. + """ + if "pull_request" not in gh_event: + return [] + return _filter_test_labels(gh_event["pull_request"]["labels"]) + + +def _filter_test_labels(labels: list[dict[str, Any]]) -> list[tuple[str, str]]: + return [ + (label["name"], label["description"]) + for label in labels + if label["name"].startswith("test:") + ] + + +@ci.command( + name="get-testing-releases", + arguments={ + "releases": { + "help": "The list of releases of salt", + "nargs": "*", + }, + "salt_version": { + "help": "The version of salt being tested against", + "required": True, + }, + }, +) +def get_testing_releases( + ctx: Context, + releases: list[tools.utils.Version], + salt_version: str = None, +): + """ + Get a list of releases to use for the upgrade and downgrade tests. 
+ """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.exit(1, "The 'GITHUB_OUTPUT' variable is not set.") + else: + # We aren't testing upgrades from anything before 3006.0 except the latest 3005.x + threshold_major = 3006 + parsed_salt_version = tools.utils.Version(salt_version) + # We want the latest 4 major versions, removing the oldest if this version is a new major + num_major_versions = 4 + if parsed_salt_version.minor == 0: + num_major_versions = 3 + majors = sorted( + list( + { + version.major + for version in releases + if version.major >= threshold_major + } + ) + )[-num_major_versions:] + testing_releases = [] + # Append the latest minor for each major + for major in majors: + minors_of_major = [ + version for version in releases if version.major == major + ] + testing_releases.append(minors_of_major[-1]) + + # TODO: Remove this block when we reach version 3009.0 + # Append the latest minor version of 3005 if we don't have enough major versions to test against + if len(testing_releases) != num_major_versions: + url = "https://repo.saltproject.io/salt/onedir/repo.json" + ret = ctx.web.get(url) + repo_data = ret.json() + latest = list(repo_data["latest"].keys())[0] + version = repo_data["latest"][latest]["version"] + testing_releases = [version] + testing_releases + + str_releases = [str(version) for version in testing_releases] + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"testing-releases={json.dumps(str_releases)}\n") + + ctx.exit(0) + + +@ci.command( + name="define-cache-seed", + arguments={ + "static_cache_seed": { + "help": "The static cache seed value", + }, + "randomize": { + "help": "Randomize the cache seed value", + }, + }, +) +def define_cache_seed(ctx: Context, static_cache_seed: str, randomize: bool = False): + """ + Set `cache-seed` in GH Actions outputs. 
+ """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert github_output is not None + + github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY") + if github_step_summary is None: + ctx.warn("The 'GITHUB_STEP_SUMMARY' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert github_step_summary is not None + + labels: list[str] = [] + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + labels.extend( + label[0] for label in _get_pr_test_labels_from_event_payload(gh_event) + ) + + if randomize is True: + cache_seed = f"SEED-{random.randint(100, 1000)}" + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + f"The cache seed has been randomized to `{cache_seed}` because " + "`--randomize` was passed to `tools ci define-cache-seed`." + ) + elif "test:random-cache-seed" in labels: + cache_seed = f"SEED-{random.randint(100, 1000)}" + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + f"The cache seed has been randomized to `{cache_seed}` because " + "the label `test:random-cache-seed` was set." 
+ ) + else: + cache_seed = static_cache_seed + + ctx.info("Writing 'cache-seed' to the github outputs file") + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"cache-seed={cache_seed}\n") diff --git a/tools/pkg/build.py b/tools/pkg/build.py index b373338a99e..d061531d11f 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -80,11 +80,18 @@ def debian( ctx.exit(1) ctx.info("Building the package from the source files") shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version new_env = { - "SALT_RELENV_VERSION": relenv_version or shared_constants["relenv_version"], - "SALT_PYTHON_VERSION": python_version - or shared_constants["python_version_linux"], + "SALT_RELENV_VERSION": relenv_version, + "SALT_PYTHON_VERSION": python_version, "SALT_PACKAGE_ARCH": str(arch), + "RELENV_FETCH_VERSION": relenv_version, } for key, value in new_env.items(): os.environ[key] = value @@ -140,11 +147,18 @@ def rpm( ctx.exit(1) ctx.info(f"Building the package from the source files") shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version new_env = { - "SALT_RELENV_VERSION": relenv_version or shared_constants["relenv_version"], - "SALT_PYTHON_VERSION": python_version - or shared_constants["python_version_linux"], + "SALT_RELENV_VERSION": relenv_version, + "SALT_PYTHON_VERSION": python_version, "SALT_PACKAGE_ARCH": str(arch), + "RELENV_FETCH_VERSION": relenv_version, } for key, value in new_env.items(): os.environ[key] = value @@ -160,7 +174,6 @@ def rpm( arguments={ "onedir": { "help": "The name of the onedir artifact, if given it should be under 
artifacts/", - "required": True, }, "salt_version": { "help": ( @@ -172,10 +185,21 @@ def rpm( "sign": { "help": "Sign and notorize built package", }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "python_version": { + "help": "The version of python to build with using relenv", + }, }, ) def macos( - ctx: Context, onedir: str = None, salt_version: str = None, sign: bool = False + ctx: Context, + onedir: str = None, + salt_version: str = None, + sign: bool = False, + relenv_version: str = None, + python_version: str = None, ): """ Build the macOS package. @@ -185,15 +209,43 @@ def macos( assert salt_version is not None checkout = pathlib.Path.cwd() - onedir_artifact = checkout / "artifacts" / onedir - _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + if onedir: + onedir_artifact = checkout / "artifacts" / onedir + ctx.info(f"Building package from existing onedir: {str(onedir_artifact)}") + _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) - build_root = checkout / "pkg" / "macos" / "build" / "opt" - build_root.mkdir(parents=True, exist_ok=True) - ctx.info(f"Extracting the onedir artifact to {build_root}") - with tarfile.open(str(onedir_artifact)) as tarball: - with ctx.chdir(onedir_artifact.parent): - tarball.extractall(path=build_root) + build_root = checkout / "pkg" / "macos" / "build" / "opt" + build_root.mkdir(parents=True, exist_ok=True) + ctx.info(f"Extracting the onedir artifact to {build_root}") + with tarfile.open(str(onedir_artifact)) as tarball: + with ctx.chdir(onedir_artifact.parent): + tarball.extractall(path=build_root) + else: + ctx.info("Building package without an existing onedir") + + if not onedir: + # Prep the salt onedir if not building from an existing one + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert 
python_version + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + with ctx.chdir(checkout / "pkg" / "macos"): + ctx.info("Fetching relenv python") + ctx.run( + "./build_python.sh", + "--version", + python_version, + "--relenv-version", + relenv_version, + ) + + ctx.info("Installing salt into the relenv python") + ctx.run("./install_salt.sh") if sign: ctx.info("Signing binaries") @@ -222,7 +274,6 @@ def macos( arguments={ "onedir": { "help": "The name of the onedir artifact, if given it should be under artifacts/", - "required": True, }, "salt_version": { "help": ( @@ -237,7 +288,13 @@ def macos( "required": True, }, "sign": { - "help": "Sign and notorize built package", + "help": "Sign and notarize built package", + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "python_version": { + "help": "The version of python to build with using relenv", }, }, ) @@ -247,33 +304,27 @@ def windows( salt_version: str = None, arch: str = None, sign: bool = False, + relenv_version: str = None, + python_version: str = None, ): """ Build the Windows package. 
""" if TYPE_CHECKING: - assert onedir is not None assert salt_version is not None assert arch is not None - checkout = pathlib.Path.cwd() - onedir_artifact = checkout / "artifacts" / onedir - _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version - unzip_dir = checkout / "pkg" / "windows" - ctx.info(f"Unzipping the onedir artifact to {unzip_dir}") - with zipfile.ZipFile(onedir_artifact, mode="r") as archive: - archive.extractall(unzip_dir) - - move_dir = unzip_dir / "salt" - build_env = unzip_dir / "buildenv" - _check_pkg_build_files_exist(ctx, move_dir=move_dir) - - ctx.info(f"Moving {move_dir} directory to the build environment in {build_env}") - shutil.move(move_dir, build_env) - - ctx.info("Building the windows package") - ctx.run( + build_cmd = [ "powershell.exe", "&", "pkg/windows/build.cmd", @@ -281,9 +332,37 @@ def windows( arch, "-Version", salt_version, + "-PythonVersion", + python_version, + "-RelenvVersion", + relenv_version, "-CICD", - "-SkipInstall", - ) + ] + + checkout = pathlib.Path.cwd() + if onedir: + build_cmd.append("-SkipInstall") + onedir_artifact = checkout / "artifacts" / onedir + ctx.info(f"Building package from existing onedir: {str(onedir_artifact)}") + _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + + unzip_dir = checkout / "pkg" / "windows" + ctx.info(f"Unzipping the onedir artifact to {unzip_dir}") + with zipfile.ZipFile(onedir_artifact, mode="r") as archive: + archive.extractall(unzip_dir) + + move_dir = unzip_dir / "salt" + build_env = unzip_dir / "buildenv" + _check_pkg_build_files_exist(ctx, move_dir=move_dir) + + ctx.info(f"Moving {move_dir} directory to the build environment in 
{build_env}") + shutil.move(move_dir, build_env) + else: + build_cmd.append("-Build") + ctx.info("Building package without an existing onedir") + + ctx.info(f"Running: {' '.join(build_cmd)} ...") + ctx.run(*build_cmd) if sign: env = os.environ.copy() @@ -364,6 +443,9 @@ def windows( "help": "The version of python to create an environment for using relenv", "required": True, }, + "relenv_version": { + "help": "The version of relenv to use", + }, "package_name": { "help": "The name of the relenv environment to be created", "required": True, @@ -378,6 +460,7 @@ def onedir_dependencies( ctx: Context, arch: str = None, python_version: str = None, + relenv_version: str = None, package_name: str = None, platform: str = None, ): @@ -392,6 +475,16 @@ def onedir_dependencies( assert package_name is not None assert platform is not None + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + # We import relenv here because it is not a hard requirement for the rest of the tools commands try: import relenv.create @@ -414,12 +507,11 @@ def onedir_dependencies( ctx.error(f"Failed to get the relenv version: {ret}") ctx.exit(1) - target_relenv_version = _get_shared_constants()["relenv_version"] env_relenv_version = ret.stdout.strip().decode() - if env_relenv_version != target_relenv_version: + if env_relenv_version != relenv_version: ctx.error( f"The onedir installed relenv version({env_relenv_version}) is not " - f"the relenv version which should be used({target_relenv_version})." + f"the relenv version which should be used({relenv_version})." 
) ctx.exit(1) @@ -510,6 +602,9 @@ def onedir_dependencies( "help": "The name of the relenv environment to install salt into", "required": True, }, + "relenv_version": { + "help": "The version of relenv to use", + }, }, ) def salt_onedir( @@ -517,6 +612,7 @@ def salt_onedir( salt_name: str, platform: str = None, package_name: str = None, + relenv_version: str = None, ): """ Install salt into a relenv onedir environment. @@ -525,6 +621,13 @@ def salt_onedir( assert platform is not None assert package_name is not None + shared_constants = _get_shared_constants() + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + salt_archive = pathlib.Path(salt_name).resolve() onedir_env = pathlib.Path(package_name).resolve() _check_pkg_build_files_exist(ctx, onedir_env=onedir_env, salt_archive=salt_archive) @@ -542,12 +645,11 @@ def salt_onedir( ctx.error(f"Failed to get the relenv version: {ret}") ctx.exit(1) - target_relenv_version = _get_shared_constants()["relenv_version"] env_relenv_version = ret.stdout.strip().decode() - if env_relenv_version != target_relenv_version: + if env_relenv_version != relenv_version: ctx.error( f"The onedir installed relenv version({env_relenv_version}) is not " - f"the relenv version which should be used({target_relenv_version})." + f"the relenv version which should be used({relenv_version})." ) ctx.exit(1) diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py deleted file mode 100644 index d781cf3c8ff..00000000000 --- a/tools/pkg/repo.py +++ /dev/null @@ -1,1906 +0,0 @@ -""" -These commands are used to build the pacakge repository files. 
-""" -# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated -from __future__ import annotations - -import fnmatch -import hashlib -import json -import logging -import os -import pathlib -import re -import shutil -import sys -import tempfile -import textwrap -from datetime import datetime -from typing import TYPE_CHECKING, Any - -import packaging.version -from ptscripts import Context, command_group - -import tools.pkg -import tools.utils -from tools.utils import Version, get_salt_releases - -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - -log = logging.getLogger(__name__) - -# Define the command group -repo = command_group( - name="repo", - help="Packaging Repository Related Commands", - description=__doc__, - parent="pkg", -) - -create = command_group( - name="create", help="Packaging Repository Creation Related Commands", parent=repo -) - -publish = command_group( - name="publish", - help="Packaging Repository Publication Related Commands", - parent=repo, -) - - -_deb_distro_info = { - "debian": { - "10": { - "label": "deb10ary", - "codename": "buster", - "suitename": "oldstable", - }, - "11": { - "label": "deb11ary", - "codename": "bullseye", - "suitename": "stable", - }, - }, - "ubuntu": { - "20.04": { - "label": "salt_ubuntu2004", - "codename": "focal", - }, - "22.04": { - "label": "salt_ubuntu2204", - "codename": "jammy", - }, - }, -} - - -@create.command( - name="deb", - arguments={ - "salt_version": { - "help": ( - "The salt version for which to build the repository configuration files. " - "If not passed, it will be discovered by running 'python3 salt/version.py'." 
- ), - "required": True, - }, - "distro": { - "help": "The debian based distribution to build the repository for", - "choices": list(_deb_distro_info), - "required": True, - }, - "distro_version": { - "help": "The distro version.", - "required": True, - }, - "distro_arch": { - "help": "The distribution architecture", - "choices": ("x86_64", "amd64", "aarch64", "arm64"), - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def debian( - ctx: Context, - salt_version: str = None, - distro: str = None, - distro_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - distro_arch: str = "amd64", - nightly_build_from: str = None, -): - """ - Create the debian repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert distro is not None - assert distro_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - display_name = f"{distro.capitalize()} {distro_version}" - if distro_version not in _deb_distro_info[distro]: - ctx.error(f"Support for {display_name} is missing.") - ctx.exit(1) - - if distro_arch == "x86_64": - ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. Adjusting.") - distro_arch = "amd64" - - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. 
Adjusting.") - distro_arch = "arm64" - - distro_details = _deb_distro_info[distro][distro_version] - - ctx.info("Distribution Details:") - ctx.info(distro_details) - if TYPE_CHECKING: - assert isinstance(distro_details["label"], str) - assert isinstance(distro_details["codename"], str) - assert isinstance(distro_details["suitename"], str) - label: str = distro_details["label"] - codename: str = distro_details["codename"] - - ftp_archive_config_suite = "" - if distro == "debian": - suitename: str = distro_details["suitename"] - ftp_archive_config_suite = ( - f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" - ) - archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" - ftp_archive_config = f"""\ - APT::FTPArchive::Release::Origin "SaltProject"; - APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} - APT::FTPArchive::Release::Codename "{codename}"; - APT::FTPArchive::Release::Architectures "{distro_arch}"; - APT::FTPArchive::Release::Components "main"; - APT::FTPArchive::Release::Description "{archive_description}"; - APT::FTPArchive::Release::Acquire-By-Hash "yes"; - Dir {{ - ArchiveDir "."; - }}; - BinDirectory "pool" {{ - Packages "dists/{codename}/main/binary-{distro_arch}/Packages"; - Sources "dists/{codename}/main/source/Sources"; - Contents "dists/{codename}/main/Contents-{distro_arch}"; - }} - """ - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - ftp_archive_config_file = 
create_repo_path / "apt-ftparchive.conf" - ctx.info(f"Writing {ftp_archive_config_file} ...") - ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config)) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - pool_path = create_repo_path / "pool" - pool_path.mkdir(exist_ok=True) - for fpath in incoming.iterdir(): - dpath = pool_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if fpath.suffix == ".dsc": - ctx.info(f"Running 'debsign' on {dpath} ...") - ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True) - - dists_path = create_repo_path / "dists" - symlink_parent_path = dists_path / codename / "main" - symlink_paths = ( - symlink_parent_path / "by-hash" / "SHA256", - symlink_parent_path / "source" / "by-hash" / "SHA256", - symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256", - ) - - for path in symlink_paths: - path.mkdir(exist_ok=True, parents=True) - - cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - - ctx.info("Creating by-hash symlinks ...") - for path in symlink_paths: - for fpath in path.parent.parent.iterdir(): - if not fpath.is_file(): - continue - sha256sum = ctx.run("sha256sum", str(fpath), capture=True) - link = path / sha256sum.stdout.decode().split()[0] - link.symlink_to(f"../../{fpath.name}") - - cmdline = [ - "apt-ftparchive", - "--no-md5", - "--no-sha1", - "--no-sha512", - "release", - "-c", - "apt-ftparchive.conf", - f"dists/{codename}/", - ] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path) - release_file = dists_path / codename / "Release" - ctx.info(f"Writing {release_file} with the output of the previous command...") - release_file.write_bytes(ret.stdout) - - cmdline = [ - "gpg", - "-u", - key_id, - "-o", - f"dists/{codename}/InRelease", 
- "-a", - "-s", - "--clearsign", - f"dists/{codename}/Release", - ] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - - cmdline = [ - "gpg", - "-u", - key_id, - "-o", - f"dists/{codename}/Release.gpg", - "-a", - "-b", - "-s", - f"dists/{codename}/Release", - ] - - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - if not nightly_build_from: - remote_versions = _get_remote_versions( - tools.utils.STAGING_BUCKET_NAME, - create_repo_path.parent.relative_to(repo_path), - ) - major_version = Version(salt_version).major - matching_major = None - for version in remote_versions: - if version.major == major_version: - matching_major = version - break - if not matching_major or matching_major <= salt_version: - major_link = create_repo_path.parent.parent / str(major_version) - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - major_link.symlink_to(f"minor/{salt_version}") - if not remote_versions or remote_versions[0] <= salt_version: - latest_link = create_repo_path.parent.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(f"minor/{salt_version}") - - ctx.info("Done") - - -_rpm_distro_info = { - "amazon": ["2"], - "redhat": ["7", "8", "9"], - "fedora": ["36", "37", "38"], - "photon": ["3", "4"], -} - - -@create.command( - name="rpm", - arguments={ - "salt_version": { - "help": ( - "The salt version for which to build the repository configuration files. " - "If not passed, it will be discovered by running 'python3 salt/version.py'." 
- ), - "required": True, - }, - "distro": { - "help": "The debian based distribution to build the repository for", - "choices": list(_rpm_distro_info), - "required": True, - }, - "distro_version": { - "help": "The distro version.", - "required": True, - }, - "distro_arch": { - "help": "The distribution architecture", - "choices": ("x86_64", "aarch64", "arm64"), - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def rpm( - ctx: Context, - salt_version: str = None, - distro: str = None, - distro_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - distro_arch: str = "amd64", - nightly_build_from: str = None, -): - """ - Create the redhat repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert distro is not None - assert distro_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - display_name = f"{distro.capitalize()} {distro_version}" - if distro_version not in _rpm_distro_info[distro]: - ctx.error(f"Support for {display_name} is missing.") - ctx.exit(1) - - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. 
Adjusting.") - distro_arch = "arm64" - - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - for fpath in incoming.iterdir(): - if ".src" in fpath.suffixes: - dpath = create_repo_path / "SRPMS" / fpath.name - else: - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if fpath.suffix == ".rpm": - ctx.info(f"Running 'rpmsign' on {dpath} ...") - ctx.run( - "rpmsign", - "--key-id", - key_id, - "--addsign", - "--digest-algo=sha256", - str(dpath), - ) - - createrepo = shutil.which("createrepo") - if createrepo is None: - container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9" - ctx.info(f"Using docker container '{container}' to call 'createrepo'...") - uid = ctx.run("id", "-u", capture=True).stdout.strip().decode() - gid = ctx.run("id", "-g", capture=True).stdout.strip().decode() - ctx.run( - "docker", - "run", - "--rm", - "-v", - f"{create_repo_path.resolve()}:/code", - "-u", - f"{uid}:{gid}", - "-w", - "/code", - container, - "createrepo", - ".", - ) - else: - ctx.run("createrepo", ".", cwd=create_repo_path) - - if nightly_build_from: - repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") - else: - repo_domain = os.environ.get( - "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io" - ) - - salt_repo_user = os.environ.get("SALT_REPO_USER") - if salt_repo_user: - log.info( - 
"SALT_REPO_USER: %s", - salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1], - ) - salt_repo_pass = os.environ.get("SALT_REPO_PASS") - if salt_repo_pass: - log.info( - "SALT_REPO_PASS: %s", - salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1], - ) - if salt_repo_user and salt_repo_pass: - repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}" - - def _create_repo_file(create_repo_path, url_suffix): - ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") - if nightly_build_from: - base_url = f"salt-dev/{nightly_build_from}/" - repo_file_contents = "[salt-nightly-repo]" - elif "rc" in salt_version: - base_url = "salt_rc/" - repo_file_contents = "[salt-rc-repo]" - else: - base_url = "" - repo_file_contents = "[salt-repo]" - base_url += f"salt/py3/{distro}/{distro_version}/{distro_arch}/{url_suffix}" - if distro == "amazon": - distro_name = "Amazon Linux" - elif distro == "redhat": - distro_name = "RHEL/CentOS" - else: - distro_name = distro.capitalize() - - if distro != "photon" and int(distro_version) < 8: - failovermethod = "\n failovermethod=priority" - else: - failovermethod = "" - - repo_file_contents += textwrap.dedent( - f""" - name=Salt repo for {distro_name} {distro_version} PY3 - baseurl=https://{repo_domain}/{base_url} - skip_if_unavailable=True{failovermethod} - priority=10 - enabled=1 - enabled_metadata=1 - gpgcheck=1 - gpgkey=https://{repo_domain}/{base_url}/{tools.utils.GPG_KEY_FILENAME}.pub - """ - ) - create_repo_path.write_text(repo_file_contents) - - if nightly_build_from: - repo_file_path = create_repo_path.parent / "nightly.repo" - else: - repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" - - _create_repo_file(repo_file_path, f"minor/{salt_version}") - - if not nightly_build_from: - remote_versions = _get_remote_versions( - tools.utils.STAGING_BUCKET_NAME, - create_repo_path.parent.relative_to(repo_path), - ) - major_version = 
Version(salt_version).major - matching_major = None - for version in remote_versions: - if version.major == major_version: - matching_major = version - break - if not matching_major or matching_major <= salt_version: - major_link = create_repo_path.parent.parent / str(major_version) - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - major_link.symlink_to(f"minor/{salt_version}") - repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo" - _create_repo_file(repo_file_path, str(major_version)) - if not remote_versions or remote_versions[0] <= salt_version: - latest_link = create_repo_path.parent.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(f"minor/{salt_version}") - repo_file_path = create_repo_path.parent.parent / "latest.repo" - _create_repo_file(repo_file_path, "latest") - - ctx.info("Done") - - -@create.command( - name="windows", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def windows( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the windows repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build_from=nightly_build_from, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="windows", - pkg_suffixes=(".msi", ".exe"), - ) - ctx.info("Done") - - -@create.command( - name="macos", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def macos( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the windows repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build_from=nightly_build_from, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="macos", - pkg_suffixes=(".pkg",), - ) - ctx.info("Done") - - -@create.command( - name="onedir", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def onedir( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the onedir repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build_from=nightly_build_from, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="onedir", - pkg_suffixes=(".xz", ".zip"), - ) - ctx.info("Done") - - -@create.command( - name="src", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def src( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the onedir repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro="src", - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - create_repo_path = create_repo_path / salt_version - create_repo_path.mkdir(exist_ok=True, parents=True) - hashes_base_path = create_repo_path / f"salt-{salt_version}" - for fpath in incoming.iterdir(): - if fpath.suffix not in (".gz",): - continue - ctx.info(f"* Processing {fpath} ...") - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - for hash_name in ("blake2b", "sha512", "sha3_512"): - ctx.info(f" * Calculating {hash_name} ...") - hexdigest = _get_file_checksum(fpath, hash_name) - with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - with open(f"{dpath}.{hash_name}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - - for fpath in create_repo_path.iterdir(): - if fpath.suffix in (".pub", ".gpg"): - continue - tools.utils.gpg_sign(ctx, key_id, fpath) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - ctx.info("Done") - - -@publish.command( - arguments={ - "repo_path": { - "help": "Local path for the repository that shall be published.", - }, - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - } -) -def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): - """ - Publish to the nightly bucket. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - _publish_repo( - ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version - ) - - -@publish.command( - arguments={ - "repo_path": { - "help": "Local path for the repository that shall be published.", - }, - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - } -) -def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): - """ - Publish to the staging bucket. - """ - if TYPE_CHECKING: - assert salt_version is not None - _publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version) - - -@repo.command(name="backup-previous-releases") -def backup_previous_releases(ctx: Context): - """ - Backup release bucket. - """ - _rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME) - ctx.info("Done") - - -@repo.command(name="restore-previous-releases") -def restore_previous_releases(ctx: Context): - """ - Restore release bucket from backup. 
- """ - _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) - github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is not None: - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"backup-complete=true\n") - ctx.info("Done") - - -def _rclone(ctx: Context, src: str, dst: str): - rclone = shutil.which("rclone") - if not rclone: - ctx.error("Could not find the rclone binary") - ctx.exit(1) - - if TYPE_CHECKING: - assert rclone - - env = os.environ.copy() - env["RCLONE_CONFIG_S3_TYPE"] = "s3" - cmdline: list[str] = [ - rclone, - "sync", - "--auto-confirm", - "--human-readable", - "--checksum", - "--color=always", - "--metadata", - "--s3-env-auth", - "--s3-location-constraint=us-west-2", - "--s3-provider=AWS", - "--s3-region=us-west-2", - "--stats-file-name-length=0", - "--stats-one-line", - "--stats=5s", - "--transfers=50", - "--fast-list", - "--verbose", - ] - if src == tools.utils.RELEASE_BUCKET_NAME: - cmdline.append("--s3-storage-class=INTELLIGENT_TIERING") - cmdline.extend([f"s3://{src}", f"s3://{dst}"]) - ctx.info(f"Running: {' '.join(cmdline)}") - ret = ctx.run(*cmdline, env=env, check=False) - if ret.returncode: - ctx.error(f"Failed to sync from s3://{src} to s3://{dst}") - ctx.exit(1) - - -@publish.command( - arguments={ - "salt_version": { - "help": "The salt version to release.", - }, - } -) -def release(ctx: Context, salt_version: str): - """ - Publish to the release bucket. 
- """ - if "rc" in salt_version: - bucket_folder = "salt_rc/salt/py3" - else: - bucket_folder = "salt/py3" - - files_to_copy: list[str] - directories_to_delete: list[str] = [] - - ctx.info("Grabbing remote file listing of files to copy...") - s3 = boto3.client("s3") - repo_release_files_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-files.json" - ) - repo_release_symlinks_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-symlinks.json" - ) - with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: - local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name - try: - bucket_name = tools.utils.STAGING_BUCKET_NAME - with local_release_files_path.open("wb") as wfh: - ctx.info( - f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." - ) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_release_files_path), - Fileobj=wfh, - ) - files_to_copy = json.loads(local_release_files_path.read_text()) - except ClientError as exc: - if "Error" not in exc.response: - log.exception(f"Error downloading {repo_release_files_path}: {exc}") - ctx.exit(1) - if exc.response["Error"]["Code"] == "404": - ctx.error(f"Could not find {repo_release_files_path} in bucket.") - ctx.exit(1) - if exc.response["Error"]["Code"] == "400": - ctx.error( - f"Could not download {repo_release_files_path} from bucket: {exc}" - ) - ctx.exit(1) - log.exception(f"Error downloading {repo_release_files_path}: {exc}") - ctx.exit(1) - local_release_symlinks_path = ( - pathlib.Path(tsd) / repo_release_symlinks_path.name - ) - try: - with local_release_symlinks_path.open("wb") as wfh: - ctx.info( - f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..." 
- ) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_release_symlinks_path), - Fileobj=wfh, - ) - directories_to_delete = json.loads(local_release_symlinks_path.read_text()) - except ClientError as exc: - if "Error" not in exc.response: - log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") - ctx.exit(1) - if exc.response["Error"]["Code"] == "404": - ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.") - ctx.exit(1) - if exc.response["Error"]["Code"] == "400": - ctx.error( - f"Could not download {repo_release_symlinks_path} from bucket: {exc}" - ) - ctx.exit(1) - log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") - ctx.exit(1) - - if directories_to_delete: - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Deleting directories to override.", - total=len(directories_to_delete), - ) - for directory in directories_to_delete: - try: - objects_to_delete: list[dict[str, str]] = [] - for path in _get_repo_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=f"{directory}/**", - ): - objects_to_delete.append({"Key": path}) - if objects_to_delete: - s3.delete_objects( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Delete={"Objects": objects_to_delete}, - ) - except ClientError: - log.exception("Failed to delete remote files") - finally: - progress.update(task, advance=1) - - already_copied_files: list[str] = [] - s3 = boto3.client("s3") - dot_repo_files = [] - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Copying files between buckets", total=len(files_to_copy) - ) - for fpath in files_to_copy: - if fpath in already_copied_files: - continue - if fpath.endswith(".repo"): - dot_repo_files.append(fpath) - ctx.info(f" * Copying {fpath}") - try: - s3.copy_object( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Key=fpath, - CopySource={ - "Bucket": tools.utils.STAGING_BUCKET_NAME, - "Key": fpath, - }, - 
MetadataDirective="COPY", - TaggingDirective="COPY", - ServerSideEncryption="AES256", - ) - already_copied_files.append(fpath) - except ClientError: - log.exception(f"Failed to copy {fpath}") - finally: - progress.update(task, advance=1) - - # Now let's get the onedir based repositories where we need to update several repo.json - major_version = packaging.version.parse(salt_version).major - with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: - repo_path = pathlib.Path(tsd) - for distro in ("windows", "macos", "onedir"): - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro=distro, - ) - repo_json_path = create_repo_path.parent.parent / "repo.json" - - release_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=repo_json_path, - ) - minor_repo_json_path = create_repo_path.parent / "repo.json" - - staging_minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.STAGING_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - release_minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - - release_json = staging_minor_repo_json[salt_version] - - major_version = Version(salt_version).major - versions = _parse_versions(*list(release_minor_repo_json)) - ctx.info( - f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in versions)}" - ) - minor_versions = [v for v in versions if v.major == major_version] - ctx.info( - f"Collected versions(Matching major: {major_version}) from " - f"{minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in minor_versions)}" - ) - if not versions: - latest_version = Version(salt_version) - else: - latest_version = versions[0] - if not minor_versions: - 
latest_minor_version = Version(salt_version) - else: - latest_minor_version = minor_versions[0] - - ctx.info(f"Release Version: {salt_version}") - ctx.info(f"Latest Repo Version: {latest_version}") - ctx.info(f"Latest Release Minor Version: {latest_minor_version}") - - # Add the minor version - release_minor_repo_json[salt_version] = release_json - - if latest_version <= salt_version: - release_repo_json["latest"] = release_json - - if latest_minor_version <= salt_version: - release_minor_repo_json["latest"] = release_json - - ctx.info(f"Writing {minor_repo_json_path} ...") - minor_repo_json_path.write_text( - json.dumps(release_minor_repo_json, sort_keys=True) - ) - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True)) - - # And now, let's get the several rpm "*.repo" files to update the base - # domain from staging to release - release_domain = os.environ.get( - "SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io" - ) - for path in dot_repo_files: - repo_file_path = repo_path.joinpath(path) - repo_file_path.parent.mkdir(exist_ok=True, parents=True) - bucket_name = tools.utils.STAGING_BUCKET_NAME - try: - ret = s3.head_object(Bucket=bucket_name, Key=path) - ctx.info( - f"Downloading existing '{repo_file_path.relative_to(repo_path)}' " - f"file from bucket {bucket_name}" - ) - size = ret["ContentLength"] - with repo_file_path.open("wb") as wfh: - with tools.utils.create_progress_bar( - file_progress=True - ) as progress: - task = progress.add_task( - description="Downloading...", total=size - ) - s3.download_fileobj( - Bucket=bucket_name, - Key=path, - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - updated_contents = re.sub( - r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$", - rf"\1=https://{release_domain}/\3", - repo_file_path.read_text(), - flags=re.MULTILINE, - ) - ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:") - ctx.print(updated_contents) - 
repo_file_path.write_text(updated_contents) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}") - - for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): - for path in filenames: - upload_path = pathlib.Path(dirpath, path) - relpath = upload_path.relative_to(repo_path) - size = upload_path.stat().st_size - ctx.info(f" {relpath}") - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Uploading...", total=size) - s3.upload_file( - str(upload_path), - tools.utils.RELEASE_BUCKET_NAME, - str(relpath), - Callback=tools.utils.UpdateProgress(progress, task), - ) - - -@publish.command( - arguments={ - "salt_version": { - "help": "The salt version to release.", - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - } -) -def github( - ctx: Context, - salt_version: str, - key_id: str = None, - repository: str = "saltstack/salt", -): - """ - Publish the release on GitHub releases. 
- """ - if TYPE_CHECKING: - assert key_id is not None - - s3 = boto3.client("s3") - - # Let's download the release artifacts stored in staging - artifacts_path = pathlib.Path.cwd() / "release-artifacts" - artifacts_path.mkdir(exist_ok=True) - release_artifacts_listing: dict[pathlib.Path, int] = {} - continuation_token = None - while True: - kwargs: dict[str, str] = {} - if continuation_token: - kwargs["ContinuationToken"] = continuation_token - ret = s3.list_objects_v2( - Bucket=tools.utils.STAGING_BUCKET_NAME, - Prefix=f"release-artifacts/{salt_version}", - FetchOwner=False, - **kwargs, - ) - contents = ret.pop("Contents", None) - if contents is None: - break - for entry in contents: - entry_path = pathlib.Path(entry["Key"]) - if entry_path.name.startswith("."): - continue - release_artifacts_listing[entry_path] = entry["Size"] - if not ret["IsTruncated"]: - break - continuation_token = ret["NextContinuationToken"] - - for entry_path, size in release_artifacts_listing.items(): - ctx.info(f" * {entry_path.name}") - local_path = artifacts_path / entry_path.name - with local_path.open("wb") as wfh: - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Downloading...", total=size) - s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, - Key=str(entry_path), - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - - for artifact in artifacts_path.iterdir(): - if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"): - continue - tools.utils.gpg_sign(ctx, key_id, artifact) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, artifacts_path) - - release_message = f"""\ - # Welcome to Salt v{salt_version} - - | :exclamation: ATTENTION | - |:-------------------------------------------------------------------------------------------------------------------------| - | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's 
version properly. | - | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`). - """ - release_message_path = artifacts_path / "gh-release-body.md" - release_message_path.write_text(textwrap.dedent(release_message).strip()) - - github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is None: - ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Stop processing.") - ctx.exit(0) - - if TYPE_CHECKING: - assert github_output is not None - - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") - - releases = get_salt_releases(ctx, repository) - if Version(salt_version) >= releases[-1]: - make_latest = True - else: - make_latest = False - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"make-latest={json.dumps(make_latest)}\n") - - artifacts_to_upload = [] - for artifact in artifacts_path.iterdir(): - if artifact.suffix == ".patch": - continue - if artifact.name == release_message_path.name: - continue - artifacts_to_upload.append(str(artifact.resolve())) - - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n") - ctx.exit(0) - - -@repo.command( - name="confirm-unreleased", - arguments={ - "salt_version": { - "help": "The salt version to check", - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - }, -) -def confirm_unreleased( - ctx: Context, salt_version: str, repository: str = "saltstack/salt" -): - """ - Confirm that the passed version is not yet tagged and/or released. 
- """ - releases = get_salt_releases(ctx, repository) - if Version(salt_version) in releases: - ctx.error(f"There's already a '{salt_version}' tag or github release.") - ctx.exit(1) - ctx.info(f"Could not find a release for Salt Version '{salt_version}'") - ctx.exit(0) - - -@repo.command( - name="confirm-staged", - arguments={ - "salt_version": { - "help": "The salt version to check", - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - }, -) -def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): - """ - Confirm that the passed version has been staged for release. - """ - s3 = boto3.client("s3") - repo_release_files_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-files.json" - ) - repo_release_symlinks_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-symlinks.json" - ) - for remote_path in (repo_release_files_path, repo_release_symlinks_path): - try: - bucket_name = tools.utils.STAGING_BUCKET_NAME - ctx.info( - f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
- ) - s3.head_object( - Bucket=bucket_name, - Key=str(remote_path), - ) - except ClientError as exc: - if "Error" not in exc.response: - log.exception(f"Could not get information about {remote_path}: {exc}") - ctx.exit(1) - if exc.response["Error"]["Code"] == "404": - ctx.error(f"Could not find {remote_path} in bucket.") - ctx.exit(1) - if exc.response["Error"]["Code"] == "400": - ctx.error(f"Could get information about {remote_path}: {exc}") - ctx.exit(1) - log.exception(f"Error getting information about {remote_path}: {exc}") - ctx.exit(1) - ctx.info(f"Version {salt_version} has been staged for release") - ctx.exit(0) - - -def _get_repo_detailed_file_list( - bucket_name: str, - bucket_folder: str = "", - glob_match: str = "**", -) -> list[dict[str, Any]]: - s3 = boto3.client("s3") - listing: list[dict[str, Any]] = [] - continuation_token = None - while True: - kwargs: dict[str, str] = {} - if continuation_token: - kwargs["ContinuationToken"] = continuation_token - ret = s3.list_objects_v2( - Bucket=bucket_name, - Prefix=bucket_folder, - FetchOwner=False, - **kwargs, - ) - contents = ret.pop("Contents", None) - if contents is None: - break - for entry in contents: - if fnmatch.fnmatch(entry["Key"], glob_match): - listing.append(entry) - if not ret["IsTruncated"]: - break - continuation_token = ret["NextContinuationToken"] - return listing - - -def _get_repo_file_list( - bucket_name: str, bucket_folder: str, glob_match: str -) -> list[str]: - return [ - entry["Key"] - for entry in _get_repo_detailed_file_list( - bucket_name, bucket_folder, glob_match=glob_match - ) - ] - - -def _get_remote_versions(bucket_name: str, remote_path: str): - log.info( - "Getting remote versions from bucket %r under path: %s", - bucket_name, - remote_path, - ) - remote_path = str(remote_path) - if not remote_path.endswith("/"): - remote_path += "/" - - s3 = boto3.client("s3") - ret = s3.list_objects( - Bucket=bucket_name, - Delimiter="/", - Prefix=remote_path, - ) - if "CommonPrefixes" 
not in ret: - return [] - versions = [] - for entry in ret["CommonPrefixes"]: - _, version = entry["Prefix"].rstrip("/").rsplit("/", 1) - if version == "latest": - continue - versions.append(Version(version)) - versions.sort(reverse=True) - log.info("Remote versions collected: %s", versions) - return versions - - -def _create_onedir_based_repo( - ctx: Context, - salt_version: str, - nightly_build_from: str | None, - repo_path: pathlib.Path, - incoming: pathlib.Path, - key_id: str, - distro: str, - pkg_suffixes: tuple[str, ...], -): - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro, - nightly_build_from=nightly_build_from, - ) - if not nightly_build_from: - repo_json_path = create_repo_path.parent.parent / "repo.json" - else: - repo_json_path = create_repo_path.parent / "repo.json" - - if nightly_build_from: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - else: - bucket_name = tools.utils.STAGING_BUCKET_NAME - - release_json = {} - - copy_exclusions = ( - ".blake2b", - ".sha512", - ".sha3_512", - ".BLAKE2B", - ".SHA512", - ".SHA3_512", - ".json", - ) - hashes_base_path = create_repo_path / f"salt-{salt_version}" - for fpath in incoming.iterdir(): - if fpath.suffix in copy_exclusions: - continue - ctx.info(f"* Processing {fpath} ...") - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if "-amd64" in dpath.name.lower(): - arch = "amd64" - elif "-x86_64" in dpath.name.lower(): - arch = "x86_64" - elif "-x86" in dpath.name.lower(): - arch = "x86" - elif "-aarch64" in dpath.name.lower(): - arch = "aarch64" - else: - ctx.error( - f"Cannot pickup the right architecture from the 
filename '{dpath.name}'." - ) - ctx.exit(1) - if distro == "onedir": - if "-onedir-linux-" in dpath.name.lower(): - release_os = "linux" - elif "-onedir-darwin-" in dpath.name.lower(): - release_os = "macos" - elif "-onedir-windows-" in dpath.name.lower(): - release_os = "windows" - else: - ctx.error( - f"Cannot pickup the right OS from the filename '{dpath.name}'." - ) - ctx.exit(1) - else: - release_os = distro - release_json[dpath.name] = { - "name": dpath.name, - "version": salt_version, - "os": release_os, - "arch": arch, - } - for hash_name in ("blake2b", "sha512", "sha3_512"): - ctx.info(f" * Calculating {hash_name} ...") - hexdigest = _get_file_checksum(fpath, hash_name) - release_json[dpath.name][hash_name.upper()] = hexdigest - with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - with open(f"{dpath}.{hash_name}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - - for fpath in create_repo_path.iterdir(): - if fpath.suffix in pkg_suffixes: - continue - tools.utils.gpg_sign(ctx, key_id, fpath) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - repo_json = _get_repo_json_file_contents( - ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path - ) - if nightly_build_from: - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) - return - - major_version = Version(salt_version).major - minor_repo_json_path = create_repo_path.parent / "repo.json" - minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=bucket_name, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - minor_repo_json[salt_version] = release_json - versions = _parse_versions(*list(minor_repo_json)) - ctx.info( - f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in versions)}" - ) - minor_versions = [v for v in versions if 
v.major == major_version] - ctx.info( - f"Collected versions(Matching major: {major_version}) from " - f"{minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in minor_versions)}" - ) - if not versions: - latest_version = Version(salt_version) - else: - latest_version = versions[0] - if not minor_versions: - latest_minor_version = Version(salt_version) - else: - latest_minor_version = minor_versions[0] - - ctx.info(f"Release Version: {salt_version}") - ctx.info(f"Latest Repo Version: {latest_version}") - ctx.info(f"Latest Release Minor Version: {latest_minor_version}") - - latest_link = create_repo_path.parent.parent / "latest" - if latest_version <= salt_version: - repo_json["latest"] = release_json - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - if latest_link.exists(): - latest_link.unlink() - latest_link.symlink_to(f"minor/{salt_version}") - else: - ctx.info( - f"Not creating the '{latest_link.relative_to(repo_path)}' symlink " - f"since {latest_version} > {salt_version}" - ) - - major_link = create_repo_path.parent.parent / str(major_version) - if latest_minor_version <= salt_version: - minor_repo_json["latest"] = release_json - # This is the latest minor, update the major in the top level repo.json - # to this version - repo_json[str(major_version)] = release_json - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - if major_link.exists(): - major_link.unlink() - major_link.symlink_to(f"minor/{salt_version}") - else: - ctx.info( - f"Not creating the '{major_link.relative_to(repo_path)}' symlink " - f"since {latest_minor_version} > {salt_version}" - ) - - ctx.info(f"Writing {minor_repo_json_path} ...") - minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True)) - - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) - - -def _get_repo_json_file_contents( - ctx: Context, - bucket_name: str, - repo_path: 
pathlib.Path, - repo_json_path: pathlib.Path, -) -> dict[str, Any]: - s3 = boto3.client("s3") - repo_json: dict[str, Any] = {} - try: - ret = s3.head_object( - Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path)) - ) - ctx.info( - f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file " - f"from bucket {bucket_name}" - ) - size = ret["ContentLength"] - with repo_json_path.open("wb") as wfh: - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Downloading...", total=size) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_json_path.relative_to(repo_path)), - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - with repo_json_path.open() as rfh: - repo_json = json.load(rfh) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}") - if repo_json: - ctx.print(repo_json, soft_wrap=True) - return repo_json - - -def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: - - with fpath.open("rb") as rfh: - try: - digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] - except AttributeError: - # Python < 3.11 - buf = bytearray(2**18) # Reusable buffer to reduce allocations. - view = memoryview(buf) - digest = getattr(hashlib, hash_name)() - while True: - size = rfh.readinto(buf) - if size == 0: - break # EOF - digest.update(view[:size]) - hexdigest: str = digest.hexdigest() - return hexdigest - - -def _publish_repo( - ctx: Context, - repo_path: pathlib.Path, - salt_version: str, - nightly_build: bool = False, - stage: bool = False, -): - """ - Publish packaging repositories. 
- """ - if nightly_build: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - elif stage: - bucket_name = tools.utils.STAGING_BUCKET_NAME - else: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - - ctx.info("Preparing upload ...") - s3 = boto3.client("s3") - to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {} - to_upload_paths: list[pathlib.Path] = [] - symlink_paths: list[str] = [] - uploaded_files: list[str] = [] - for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): - for dirname in dirnames: - path = pathlib.Path(dirpath, dirname) - if not path.is_symlink(): - continue - # This is a symlink, then we need to delete all files under - # that directory in S3 because S3 does not understand symlinks - # and we would end up adding files to that folder instead of - # replacing it. - try: - relpath = path.relative_to(repo_path) - ret = s3.list_objects( - Bucket=bucket_name, - Prefix=str(relpath), - ) - if "Contents" not in ret: - continue - objects = [] - for entry in ret["Contents"]: - objects.append({"Key": entry["Key"]}) - to_delete_paths[path] = objects - symlink_paths.append(str(relpath)) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - - for fpath in filenames: - path = pathlib.Path(dirpath, fpath) - to_upload_paths.append(path) - - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Deleting directories to override.", total=len(to_delete_paths) - ) - for base, objects in to_delete_paths.items(): - relpath = base.relative_to(repo_path) - bucket_uri = f"s3://{bucket_name}/{relpath}" - progress.update(task, description=f"Deleting {bucket_uri}") - try: - ret = s3.delete_objects( - Bucket=bucket_name, - Delete={"Objects": objects}, - ) - except ClientError: - log.exception(f"Failed to delete {bucket_uri}") - finally: - progress.update(task, advance=1) - - try: - ctx.info("Uploading repository ...") - for upload_path in 
to_upload_paths: - relpath = upload_path.relative_to(repo_path) - size = upload_path.stat().st_size - ctx.info(f" {relpath}") - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Uploading...", total=size) - s3.upload_file( - str(upload_path), - bucket_name, - str(relpath), - Callback=tools.utils.UpdateProgress(progress, task), - ExtraArgs={ - "Metadata": { - "x-amz-meta-salt-release-version": salt_version, - } - }, - ) - uploaded_files.append(str(relpath)) - if stage is True: - repo_files_path = f"release-artifacts/{salt_version}/.release-files.json" - ctx.info(f"Uploading {repo_files_path} ...") - s3.put_object( - Key=repo_files_path, - Bucket=bucket_name, - Body=json.dumps(uploaded_files).encode(), - Metadata={ - "x-amz-meta-salt-release-version": salt_version, - }, - ) - repo_symlinks_path = ( - f"release-artifacts/{salt_version}/.release-symlinks.json" - ) - ctx.info(f"Uploading {repo_symlinks_path} ...") - s3.put_object( - Key=repo_symlinks_path, - Bucket=bucket_name, - Body=json.dumps(symlink_paths).encode(), - Metadata={ - "x-amz-meta-salt-release-version": salt_version, - }, - ) - except KeyboardInterrupt: - pass - - -def _create_top_level_repo_path( - ctx: Context, - repo_path: pathlib.Path, - salt_version: str, - distro: str, - distro_version: str | None = None, # pylint: disable=bad-whitespace - distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build_from: str | None = None, # pylint: disable=bad-whitespace -): - create_repo_path = repo_path - if nightly_build_from: - create_repo_path = ( - create_repo_path - / "salt-dev" - / nightly_build_from - / datetime.utcnow().strftime("%Y-%m-%d") - ) - create_repo_path.mkdir(exist_ok=True, parents=True) - with ctx.chdir(create_repo_path.parent): - latest_nightly_symlink = pathlib.Path("latest") - if not latest_nightly_symlink.exists(): - ctx.info( - f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." 
- ) - latest_nightly_symlink.symlink_to( - create_repo_path.name, target_is_directory=True - ) - elif "rc" in salt_version: - create_repo_path = create_repo_path / "salt_rc" - create_repo_path = create_repo_path / "salt" / "py3" / distro - if distro_version: - create_repo_path = create_repo_path / distro_version - if distro_arch: - create_repo_path = create_repo_path / distro_arch - create_repo_path.mkdir(exist_ok=True, parents=True) - return create_repo_path - - -def _create_repo_path( - ctx: Context, - repo_path: pathlib.Path, - salt_version: str, - distro: str, - distro_version: str | None = None, # pylint: disable=bad-whitespace - distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build_from: str | None = None, # pylint: disable=bad-whitespace -): - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version, - distro_arch, - nightly_build_from=nightly_build_from, - ) - create_repo_path = create_repo_path / "minor" / salt_version - create_repo_path.mkdir(exist_ok=True, parents=True) - return create_repo_path - - -def _parse_versions(*versions: str) -> list[Version]: - _versions = [] - for version in set(versions): - if version == "latest": - continue - _versions.append(Version(version)) - if _versions: - _versions.sort(reverse=True) - return _versions diff --git a/tools/pkg/repo/__init__.py b/tools/pkg/repo/__init__.py new file mode 100644 index 00000000000..d965fcfd923 --- /dev/null +++ b/tools/pkg/repo/__init__.py @@ -0,0 +1,181 @@ +""" +These commands are used to build the package repository files. 
+""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import os +import pathlib +import shutil +import sys +from typing import TYPE_CHECKING + +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import Version, get_salt_releases + +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + +log = logging.getLogger(__name__) + +# Define the command group +repo = command_group( + name="repo", + help="Packaging Repository Related Commands", + description=__doc__, + parent="pkg", +) + + +@repo.command(name="backup-previous-releases") +def backup_previous_releases(ctx: Context): + """ + Backup release bucket. + """ + _rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME) + ctx.info("Done") + + +@repo.command(name="restore-previous-releases") +def restore_previous_releases(ctx: Context): + """ + Restore release bucket from backup. 
+ """ + _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"backup-complete=true\n") + ctx.info("Done") + + +def _rclone(ctx: Context, src: str, dst: str): + rclone = shutil.which("rclone") + if not rclone: + ctx.error("Could not find the rclone binary") + ctx.exit(1) + + if TYPE_CHECKING: + assert rclone + + env = os.environ.copy() + env["RCLONE_CONFIG_S3_TYPE"] = "s3" + cmdline: list[str] = [ + rclone, + "sync", + "--auto-confirm", + "--human-readable", + "--checksum", + "--color=always", + "--metadata", + "--s3-env-auth", + "--s3-location-constraint=us-west-2", + "--s3-provider=AWS", + "--s3-region=us-west-2", + "--stats-file-name-length=0", + "--stats-one-line", + "--stats=5s", + "--transfers=50", + "--fast-list", + "--verbose", + ] + if src == tools.utils.RELEASE_BUCKET_NAME: + cmdline.append("--s3-storage-class=INTELLIGENT_TIERING") + cmdline.extend([f"s3://{src}", f"s3://{dst}"]) + ctx.info(f"Running: {' '.join(cmdline)}") + ret = ctx.run(*cmdline, env=env, check=False) + if ret.returncode: + ctx.error(f"Failed to sync from s3://{src} to s3://{dst}") + ctx.exit(1) + + +@repo.command( + name="confirm-unreleased", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_unreleased( + ctx: Context, salt_version: str, repository: str = "saltstack/salt" +): + """ + Confirm that the passed version is not yet tagged and/or released. 
+ """ + releases = get_salt_releases(ctx, repository) + if Version(salt_version) in releases: + ctx.error(f"There's already a '{salt_version}' tag or github release.") + ctx.exit(1) + ctx.info(f"Could not find a release for Salt Version '{salt_version}'") + ctx.exit(0) + + +@repo.command( + name="confirm-staged", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): + """ + Confirm that the passed version has been staged for release. + """ + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + for remote_path in (repo_release_files_path, repo_release_symlinks_path): + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + ctx.info( + f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
+ ) + s3.head_object( + Bucket=bucket_name, + Key=str(remote_path), + ) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Could not get information about {remote_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {remote_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error(f"Could get information about {remote_path}: {exc}") + ctx.exit(1) + log.exception(f"Error getting information about {remote_path}: {exc}") + ctx.exit(1) + ctx.info(f"Version {salt_version} has been staged for release") + ctx.exit(0) diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py new file mode 100644 index 00000000000..60ed8ad0570 --- /dev/null +++ b/tools/pkg/repo/create.py @@ -0,0 +1,1038 @@ +""" +These commands are used to build the package repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import hashlib +import json +import logging +import os +import pathlib +import shutil +import sys +import textwrap +from typing import TYPE_CHECKING + +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import ( + Version, + create_full_repo_path, + create_top_level_repo_path, + get_repo_json_file_contents, + parse_versions, +) + +try: + import boto3 +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + +log = logging.getLogger(__name__) + +create = command_group( + name="create", + help="Packaging Repository Creation Related Commands", + parent=["pkg", "repo"], +) + + +_deb_distro_info = { + "debian": { + "10": { + "label": "deb10ary", + "codename": "buster", + "suitename": "oldstable", + }, + "11": { + "label": "deb11ary", + "codename": "bullseye", + "suitename": "stable", + }, + }, + "ubuntu": 
{ + "20.04": { + "label": "salt_ubuntu2004", + "codename": "focal", + }, + "22.04": { + "label": "salt_ubuntu2204", + "codename": "jammy", + }, + }, +} + + +@create.command( + name="deb", + arguments={ + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." + ), + "required": True, + }, + "distro": { + "help": "The debian based distribution to build the repository for", + "choices": list(_deb_distro_info), + "required": True, + }, + "distro_version": { + "help": "The distro version.", + "required": True, + }, + "distro_arch": { + "help": "The distribution architecture", + "choices": ("x86_64", "amd64", "aarch64", "arm64"), + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def debian( + ctx: Context, + salt_version: str = None, + distro: str = None, + distro_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + distro_arch: str = "amd64", + nightly_build_from: str = None, +): + """ + Create the debian repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert distro is not None + assert distro_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + display_name = f"{distro.capitalize()} {distro_version}" + if distro_version not in _deb_distro_info[distro]: + ctx.error(f"Support for {display_name} is missing.") + ctx.exit(1) + + if distro_arch == "x86_64": + ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. 
Adjusting.") + distro_arch = "amd64" + + if distro_arch == "aarch64": + ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") + distro_arch = "arm64" + + distro_details = _deb_distro_info[distro][distro_version] + + ctx.info("Distribution Details:") + ctx.info(distro_details) + if TYPE_CHECKING: + assert isinstance(distro_details["label"], str) + assert isinstance(distro_details["codename"], str) + assert isinstance(distro_details["suitename"], str) + label: str = distro_details["label"] + codename: str = distro_details["codename"] + + ftp_archive_config_suite = "" + if distro == "debian": + suitename: str = distro_details["suitename"] + ftp_archive_config_suite = ( + f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" + ) + archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" + ftp_archive_config = f"""\ + APT::FTPArchive::Release::Origin "SaltProject"; + APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} + APT::FTPArchive::Release::Codename "{codename}"; + APT::FTPArchive::Release::Architectures "{distro_arch}"; + APT::FTPArchive::Release::Components "main"; + APT::FTPArchive::Release::Description "{archive_description}"; + APT::FTPArchive::Release::Acquire-By-Hash "yes"; + Dir {{ + ArchiveDir "."; + }}; + BinDirectory "pool" {{ + Packages "dists/{codename}/main/binary-{distro_arch}/Packages"; + Sources "dists/{codename}/main/source/Sources"; + Contents "dists/{codename}/main/Contents-{distro_arch}"; + }} + """ + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro, + 
distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" + ctx.info(f"Writing {ftp_archive_config_file} ...") + ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config)) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + pool_path = create_repo_path / "pool" + pool_path.mkdir(exist_ok=True) + for fpath in incoming.iterdir(): + dpath = pool_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if fpath.suffix == ".dsc": + ctx.info(f"Running 'debsign' on {dpath} ...") + ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True) + + dists_path = create_repo_path / "dists" + symlink_parent_path = dists_path / codename / "main" + symlink_paths = ( + symlink_parent_path / "by-hash" / "SHA256", + symlink_parent_path / "source" / "by-hash" / "SHA256", + symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256", + ) + + for path in symlink_paths: + path.mkdir(exist_ok=True, parents=True) + + cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + + ctx.info("Creating by-hash symlinks ...") + for path in symlink_paths: + for fpath in path.parent.parent.iterdir(): + if not fpath.is_file(): + continue + sha256sum = ctx.run("sha256sum", str(fpath), capture=True) + link = path / sha256sum.stdout.decode().split()[0] + link.symlink_to(f"../../{fpath.name}") + + cmdline = [ + "apt-ftparchive", + "--no-md5", + "--no-sha1", + "--no-sha512", + "release", + "-c", + "apt-ftparchive.conf", + f"dists/{codename}/", + ] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path) + release_file = dists_path / codename / "Release" + ctx.info(f"Writing {release_file} with the output of the previous 
command...") + release_file.write_bytes(ret.stdout) + + cmdline = [ + "gpg", + "-u", + key_id, + "-o", + f"dists/{codename}/InRelease", + "-a", + "-s", + "--clearsign", + f"dists/{codename}/Release", + ] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + + cmdline = [ + "gpg", + "-u", + key_id, + "-o", + f"dists/{codename}/Release.gpg", + "-a", + "-b", + "-s", + f"dists/{codename}/Release", + ] + + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + if not nightly_build_from: + remote_versions = _get_remote_versions( + tools.utils.STAGING_BUCKET_NAME, + create_repo_path.parent.relative_to(repo_path), + ) + major_version = Version(salt_version).major + matching_major = None + for version in remote_versions: + if version.major == major_version: + matching_major = version + break + if not matching_major or matching_major <= salt_version: + major_link = create_repo_path.parent.parent / str(major_version) + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + major_link.symlink_to(f"minor/{salt_version}") + if not remote_versions or remote_versions[0] <= salt_version: + latest_link = create_repo_path.parent.parent / "latest" + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + latest_link.symlink_to(f"minor/{salt_version}") + + ctx.info("Done") + + +_rpm_distro_info = { + "amazon": ["2"], + "redhat": ["7", "8", "9"], + "fedora": ["36", "37", "38"], + "photon": ["3", "4"], +} + + +@create.command( + name="rpm", + arguments={ + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." 
+ ), + "required": True, + }, + "distro": { + "help": "The debian based distribution to build the repository for", + "choices": list(_rpm_distro_info), + "required": True, + }, + "distro_version": { + "help": "The distro version.", + "required": True, + }, + "distro_arch": { + "help": "The distribution architecture", + "choices": ("x86_64", "aarch64", "arm64"), + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def rpm( + ctx: Context, + salt_version: str = None, + distro: str = None, + distro_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + distro_arch: str = "amd64", + nightly_build_from: str = None, +): + """ + Create the redhat repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert distro is not None + assert distro_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + display_name = f"{distro.capitalize()} {distro_version}" + if distro_version not in _rpm_distro_info[distro]: + ctx.error(f"Support for {display_name} is missing.") + ctx.exit(1) + + if distro_arch == "aarch64": + ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. 
Adjusting.") + distro_arch = "arm64" + + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + for fpath in incoming.iterdir(): + if ".src" in fpath.suffixes: + dpath = create_repo_path / "SRPMS" / fpath.name + else: + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if fpath.suffix == ".rpm": + ctx.info(f"Running 'rpmsign' on {dpath} ...") + ctx.run( + "rpmsign", + "--key-id", + key_id, + "--addsign", + "--digest-algo=sha256", + str(dpath), + ) + + createrepo = shutil.which("createrepo") + if createrepo is None: + container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9" + ctx.info(f"Using docker container '{container}' to call 'createrepo'...") + uid = ctx.run("id", "-u", capture=True).stdout.strip().decode() + gid = ctx.run("id", "-g", capture=True).stdout.strip().decode() + ctx.run( + "docker", + "run", + "--rm", + "-v", + f"{create_repo_path.resolve()}:/code", + "-u", + f"{uid}:{gid}", + "-w", + "/code", + container, + "createrepo", + ".", + ) + else: + ctx.run("createrepo", ".", cwd=create_repo_path) + + if nightly_build_from: + repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") + else: + repo_domain = os.environ.get( + "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io" + ) + + salt_repo_user = os.environ.get("SALT_REPO_USER") + if salt_repo_user: + log.info( + 
"SALT_REPO_USER: %s", + salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1], + ) + salt_repo_pass = os.environ.get("SALT_REPO_PASS") + if salt_repo_pass: + log.info( + "SALT_REPO_PASS: %s", + salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1], + ) + if salt_repo_user and salt_repo_pass: + repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}" + + def _create_repo_file(create_repo_path, url_suffix): + ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") + if nightly_build_from: + base_url = f"salt-dev/{nightly_build_from}/" + repo_file_contents = "[salt-nightly-repo]" + elif "rc" in salt_version: + base_url = "salt_rc/" + repo_file_contents = "[salt-rc-repo]" + else: + base_url = "" + repo_file_contents = "[salt-repo]" + base_url += f"salt/py3/{distro}/{distro_version}/{distro_arch}/{url_suffix}" + if distro == "amazon": + distro_name = "Amazon Linux" + elif distro == "redhat": + distro_name = "RHEL/CentOS" + else: + distro_name = distro.capitalize() + + if distro != "photon" and int(distro_version) < 8: + failovermethod = "\n failovermethod=priority" + else: + failovermethod = "" + + repo_file_contents += textwrap.dedent( + f""" + name=Salt repo for {distro_name} {distro_version} PY3 + baseurl=https://{repo_domain}/{base_url} + skip_if_unavailable=True{failovermethod} + priority=10 + enabled=1 + enabled_metadata=1 + gpgcheck=1 + gpgkey=https://{repo_domain}/{base_url}/{tools.utils.GPG_KEY_FILENAME}.pub + """ + ) + create_repo_path.write_text(repo_file_contents) + + if nightly_build_from: + repo_file_path = create_repo_path.parent / "nightly.repo" + else: + repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" + + _create_repo_file(repo_file_path, f"minor/{salt_version}") + + if not nightly_build_from: + remote_versions = _get_remote_versions( + tools.utils.STAGING_BUCKET_NAME, + create_repo_path.parent.relative_to(repo_path), + ) + major_version = 
Version(salt_version).major + matching_major = None + for version in remote_versions: + if version.major == major_version: + matching_major = version + break + if not matching_major or matching_major <= salt_version: + major_link = create_repo_path.parent.parent / str(major_version) + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + major_link.symlink_to(f"minor/{salt_version}") + repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo" + _create_repo_file(repo_file_path, str(major_version)) + if not remote_versions or remote_versions[0] <= salt_version: + latest_link = create_repo_path.parent.parent / "latest" + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + latest_link.symlink_to(f"minor/{salt_version}") + repo_file_path = create_repo_path.parent.parent / "latest.repo" + _create_repo_file(repo_file_path, "latest") + + ctx.info("Done") + + +@create.command( + name="windows", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def windows( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the windows repository. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + _create_onedir_based_repo( + ctx, + salt_version=salt_version, + nightly_build_from=nightly_build_from, + repo_path=repo_path, + incoming=incoming, + key_id=key_id, + distro="windows", + pkg_suffixes=(".msi", ".exe"), + ) + ctx.info("Done") + + +@create.command( + name="macos", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def macos( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the windows repository. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + _create_onedir_based_repo( + ctx, + salt_version=salt_version, + nightly_build_from=nightly_build_from, + repo_path=repo_path, + incoming=incoming, + key_id=key_id, + distro="macos", + pkg_suffixes=(".pkg",), + ) + ctx.info("Done") + + +@create.command( + name="onedir", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def onedir( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the onedir repository. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + _create_onedir_based_repo( + ctx, + salt_version=salt_version, + nightly_build_from=nightly_build_from, + repo_path=repo_path, + incoming=incoming, + key_id=key_id, + distro="onedir", + pkg_suffixes=(".xz", ".zip"), + ) + ctx.info("Done") + + +@create.command( + name="src", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def src( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the onedir repository. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro="src", + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + create_repo_path = create_repo_path / salt_version + create_repo_path.mkdir(exist_ok=True, parents=True) + hashes_base_path = create_repo_path / f"salt-{salt_version}" + for fpath in incoming.iterdir(): + if fpath.suffix not in (".gz",): + continue + ctx.info(f"* Processing {fpath} ...") + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + for hash_name in ("blake2b", "sha512", "sha3_512"): + ctx.info(f" * Calculating {hash_name} ...") + hexdigest = _get_file_checksum(fpath, hash_name) + with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + with open(f"{dpath}.{hash_name}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + + for fpath in create_repo_path.iterdir(): + if fpath.suffix in (".pub", ".gpg"): + continue + tools.utils.gpg_sign(ctx, key_id, fpath) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + ctx.info("Done") + + +def _get_remote_versions(bucket_name: str, remote_path: str): + log.info( + "Getting remote versions from bucket %r under path: %s", + bucket_name, + remote_path, + ) + remote_path = str(remote_path) + if not remote_path.endswith("/"): + remote_path += "/" + + s3 = boto3.client("s3") + ret = s3.list_objects( + Bucket=bucket_name, + Delimiter="/", + Prefix=remote_path, + ) + if "CommonPrefixes" not in ret: + return [] + versions = [] + for entry in ret["CommonPrefixes"]: + _, version = entry["Prefix"].rstrip("/").rsplit("/", 1) + 
if version == "latest": + continue + versions.append(Version(version)) + versions.sort(reverse=True) + log.info("Remote versions collected: %s", versions) + return versions + + +def _create_onedir_based_repo( + ctx: Context, + salt_version: str, + nightly_build_from: str | None, + repo_path: pathlib.Path, + incoming: pathlib.Path, + key_id: str, + distro: str, + pkg_suffixes: tuple[str, ...], +): + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro, + nightly_build_from=nightly_build_from, + ) + if not nightly_build_from: + repo_json_path = create_repo_path.parent.parent / "repo.json" + else: + repo_json_path = create_repo_path.parent / "repo.json" + + if nightly_build_from: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + else: + bucket_name = tools.utils.STAGING_BUCKET_NAME + + release_json = {} + + copy_exclusions = ( + ".blake2b", + ".sha512", + ".sha3_512", + ".BLAKE2B", + ".SHA512", + ".SHA3_512", + ".json", + ) + hashes_base_path = create_repo_path / f"salt-{salt_version}" + for fpath in incoming.iterdir(): + if fpath.suffix in copy_exclusions: + continue + ctx.info(f"* Processing {fpath} ...") + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if "-amd64" in dpath.name.lower(): + arch = "amd64" + elif "-x86_64" in dpath.name.lower(): + arch = "x86_64" + elif "-x86" in dpath.name.lower(): + arch = "x86" + elif "-aarch64" in dpath.name.lower(): + arch = "aarch64" + else: + ctx.error( + f"Cannot pickup the right architecture from the filename '{dpath.name}'." 
+ ) + ctx.exit(1) + if distro == "onedir": + if "-onedir-linux-" in dpath.name.lower(): + release_os = "linux" + elif "-onedir-darwin-" in dpath.name.lower(): + release_os = "macos" + elif "-onedir-windows-" in dpath.name.lower(): + release_os = "windows" + else: + ctx.error( + f"Cannot pickup the right OS from the filename '{dpath.name}'." + ) + ctx.exit(1) + else: + release_os = distro + release_json[dpath.name] = { + "name": dpath.name, + "version": salt_version, + "os": release_os, + "arch": arch, + } + for hash_name in ("blake2b", "sha512", "sha3_512"): + ctx.info(f" * Calculating {hash_name} ...") + hexdigest = _get_file_checksum(fpath, hash_name) + release_json[dpath.name][hash_name.upper()] = hexdigest + with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + with open(f"{dpath}.{hash_name}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + + for fpath in create_repo_path.iterdir(): + if fpath.suffix in pkg_suffixes: + continue + tools.utils.gpg_sign(ctx, key_id, fpath) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + repo_json = get_repo_json_file_contents( + ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path + ) + if nightly_build_from: + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) + return + + major_version = Version(salt_version).major + minor_repo_json_path = create_repo_path.parent / "repo.json" + minor_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=bucket_name, + repo_path=repo_path, + repo_json_path=minor_repo_json_path, + ) + minor_repo_json[salt_version] = release_json + versions = parse_versions(*list(minor_repo_json)) + ctx.info( + f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in versions)}" + ) + minor_versions = [v for v in versions if v.major == major_version] + 
ctx.info( + f"Collected versions(Matching major: {major_version}) from " + f"{minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in minor_versions)}" + ) + if not versions: + latest_version = Version(salt_version) + else: + latest_version = versions[0] + if not minor_versions: + latest_minor_version = Version(salt_version) + else: + latest_minor_version = minor_versions[0] + + ctx.info(f"Release Version: {salt_version}") + ctx.info(f"Latest Repo Version: {latest_version}") + ctx.info(f"Latest Release Minor Version: {latest_minor_version}") + + latest_link = create_repo_path.parent.parent / "latest" + if latest_version <= salt_version: + repo_json["latest"] = release_json + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + if latest_link.exists(): + latest_link.unlink() + latest_link.symlink_to(f"minor/{salt_version}") + else: + ctx.info( + f"Not creating the '{latest_link.relative_to(repo_path)}' symlink " + f"since {latest_version} > {salt_version}" + ) + + major_link = create_repo_path.parent.parent / str(major_version) + if latest_minor_version <= salt_version: + minor_repo_json["latest"] = release_json + # This is the latest minor, update the major in the top level repo.json + # to this version + repo_json[str(major_version)] = release_json + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + if major_link.exists(): + major_link.unlink() + major_link.symlink_to(f"minor/{salt_version}") + else: + ctx.info( + f"Not creating the '{major_link.relative_to(repo_path)}' symlink " + f"since {latest_minor_version} > {salt_version}" + ) + + ctx.info(f"Writing {minor_repo_json_path} ...") + minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True)) + + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) + + +def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: + + with fpath.open("rb") as rfh: + try: + digest 
= hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] + except AttributeError: + # Python < 3.11 + buf = bytearray(2**18) # Reusable buffer to reduce allocations. + view = memoryview(buf) + digest = getattr(hashlib, hash_name)() + while True: + size = rfh.readinto(buf) + if size == 0: + break # EOF + digest.update(view[:size]) + hexdigest: str = digest.hexdigest() + return hexdigest diff --git a/tools/pkg/repo/publish.py b/tools/pkg/repo/publish.py new file mode 100644 index 00000000000..1c87d20b490 --- /dev/null +++ b/tools/pkg/repo/publish.py @@ -0,0 +1,653 @@ +""" +These commands are used to build the package repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import fnmatch +import json +import logging +import os +import pathlib +import re +import sys +import tempfile +import textwrap +from typing import TYPE_CHECKING, Any + +import packaging.version +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import ( + Version, + create_full_repo_path, + get_repo_json_file_contents, + get_salt_releases, + parse_versions, +) + +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + +log = logging.getLogger(__name__) + +publish = command_group( + name="publish", + help="Packaging Repository Publication Related Commands", + parent=["pkg", "repo"], +) + + +@publish.command( + arguments={ + "repo_path": { + "help": "Local path for the repository that shall be published.", + }, + "salt_version": { + "help": "The salt version of the repository to publish", + "required": True, + }, + } +) +def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): + """ + Publish to the nightly bucket. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + _publish_repo( + ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version + ) + + +@publish.command( + arguments={ + "repo_path": { + "help": "Local path for the repository that shall be published.", + }, + "salt_version": { + "help": "The salt version of the repository to publish", + "required": True, + }, + } +) +def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): + """ + Publish to the staging bucket. + """ + if TYPE_CHECKING: + assert salt_version is not None + _publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version) + + +@publish.command( + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + } +) +def release(ctx: Context, salt_version: str): + """ + Publish to the release bucket. + """ + if "rc" in salt_version: + bucket_folder = "salt_rc/salt/py3" + else: + bucket_folder = "salt/py3" + + files_to_copy: list[str] + directories_to_delete: list[str] = [] + + ctx.info("Grabbing remote file listing of files to copy...") + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + with local_release_files_path.open("wb") as wfh: + ctx.info( + f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." 
+ ) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_release_files_path), + Fileobj=wfh, + ) + files_to_copy = json.loads(local_release_files_path.read_text()) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {repo_release_files_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {repo_release_files_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error( + f"Could not download {repo_release_files_path} from bucket: {exc}" + ) + ctx.exit(1) + log.exception(f"Error downloading {repo_release_files_path}: {exc}") + ctx.exit(1) + local_release_symlinks_path = ( + pathlib.Path(tsd) / repo_release_symlinks_path.name + ) + try: + with local_release_symlinks_path.open("wb") as wfh: + ctx.info( + f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..." + ) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_release_symlinks_path), + Fileobj=wfh, + ) + directories_to_delete = json.loads(local_release_symlinks_path.read_text()) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error( + f"Could not download {repo_release_symlinks_path} from bucket: {exc}" + ) + ctx.exit(1) + log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") + ctx.exit(1) + + if directories_to_delete: + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Deleting directories to override.", + total=len(directories_to_delete), + ) + for directory in directories_to_delete: + try: + objects_to_delete: list[dict[str, str]] = [] + for path in _get_repo_file_list( + bucket_name=tools.utils.RELEASE_BUCKET_NAME, + 
bucket_folder=bucket_folder, + glob_match=f"{directory}/**", + ): + objects_to_delete.append({"Key": path}) + if objects_to_delete: + s3.delete_objects( + Bucket=tools.utils.RELEASE_BUCKET_NAME, + Delete={"Objects": objects_to_delete}, + ) + except ClientError: + log.exception("Failed to delete remote files") + finally: + progress.update(task, advance=1) + + already_copied_files: list[str] = [] + s3 = boto3.client("s3") + dot_repo_files = [] + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Copying files between buckets", total=len(files_to_copy) + ) + for fpath in files_to_copy: + if fpath in already_copied_files: + continue + if fpath.endswith(".repo"): + dot_repo_files.append(fpath) + ctx.info(f" * Copying {fpath}") + try: + s3.copy_object( + Bucket=tools.utils.RELEASE_BUCKET_NAME, + Key=fpath, + CopySource={ + "Bucket": tools.utils.STAGING_BUCKET_NAME, + "Key": fpath, + }, + MetadataDirective="COPY", + TaggingDirective="COPY", + ServerSideEncryption="AES256", + ) + already_copied_files.append(fpath) + except ClientError: + log.exception(f"Failed to copy {fpath}") + finally: + progress.update(task, advance=1) + + # Now let's get the onedir based repositories where we need to update several repo.json + major_version = packaging.version.parse(salt_version).major + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + repo_path = pathlib.Path(tsd) + for distro in ("windows", "macos", "onedir"): + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro=distro, + ) + repo_json_path = create_repo_path.parent.parent / "repo.json" + + release_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=tools.utils.RELEASE_BUCKET_NAME, + repo_path=repo_path, + repo_json_path=repo_json_path, + ) + minor_repo_json_path = create_repo_path.parent / "repo.json" + + staging_minor_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=tools.utils.STAGING_BUCKET_NAME, + 
repo_path=repo_path, + repo_json_path=minor_repo_json_path, + ) + release_minor_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=tools.utils.RELEASE_BUCKET_NAME, + repo_path=repo_path, + repo_json_path=minor_repo_json_path, + ) + + release_json = staging_minor_repo_json[salt_version] + + major_version = Version(salt_version).major + versions = parse_versions(*list(release_minor_repo_json)) + ctx.info( + f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in versions)}" + ) + minor_versions = [v for v in versions if v.major == major_version] + ctx.info( + f"Collected versions(Matching major: {major_version}) from " + f"{minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in minor_versions)}" + ) + if not versions: + latest_version = Version(salt_version) + else: + latest_version = versions[0] + if not minor_versions: + latest_minor_version = Version(salt_version) + else: + latest_minor_version = minor_versions[0] + + ctx.info(f"Release Version: {salt_version}") + ctx.info(f"Latest Repo Version: {latest_version}") + ctx.info(f"Latest Release Minor Version: {latest_minor_version}") + + # Add the minor version + release_minor_repo_json[salt_version] = release_json + + if latest_version <= salt_version: + release_repo_json["latest"] = release_json + + if latest_minor_version <= salt_version: + release_minor_repo_json["latest"] = release_json + + ctx.info(f"Writing {minor_repo_json_path} ...") + minor_repo_json_path.write_text( + json.dumps(release_minor_repo_json, sort_keys=True) + ) + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True)) + + # And now, let's get the several rpm "*.repo" files to update the base + # domain from staging to release + release_domain = os.environ.get( + "SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io" + ) + for path in dot_repo_files: + repo_file_path = repo_path.joinpath(path) + 
repo_file_path.parent.mkdir(exist_ok=True, parents=True) + bucket_name = tools.utils.STAGING_BUCKET_NAME + try: + ret = s3.head_object(Bucket=bucket_name, Key=path) + ctx.info( + f"Downloading existing '{repo_file_path.relative_to(repo_path)}' " + f"file from bucket {bucket_name}" + ) + size = ret["ContentLength"] + with repo_file_path.open("wb") as wfh: + with tools.utils.create_progress_bar( + file_progress=True + ) as progress: + task = progress.add_task( + description="Downloading...", total=size + ) + s3.download_fileobj( + Bucket=bucket_name, + Key=path, + Fileobj=wfh, + Callback=tools.utils.UpdateProgress(progress, task), + ) + updated_contents = re.sub( + r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$", + rf"\1=https://{release_domain}/\3", + repo_file_path.read_text(), + flags=re.MULTILINE, + ) + ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:") + ctx.print(updated_contents) + repo_file_path.write_text(updated_contents) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}") + + for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): + for path in filenames: + upload_path = pathlib.Path(dirpath, path) + relpath = upload_path.relative_to(repo_path) + size = upload_path.stat().st_size + ctx.info(f" {relpath}") + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Uploading...", total=size) + s3.upload_file( + str(upload_path), + tools.utils.RELEASE_BUCKET_NAME, + str(relpath), + Callback=tools.utils.UpdateProgress(progress, task), + ) + + +@publish.command( + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." 
+ ) + }, + } +) +def github( + ctx: Context, + salt_version: str, + key_id: str = None, + repository: str = "saltstack/salt", +): + """ + Publish the release on GitHub releases. + """ + if TYPE_CHECKING: + assert key_id is not None + + s3 = boto3.client("s3") + + # Let's download the release artifacts stored in staging + artifacts_path = pathlib.Path.cwd() / "release-artifacts" + artifacts_path.mkdir(exist_ok=True) + release_artifacts_listing: dict[pathlib.Path, int] = {} + continuation_token = None + while True: + kwargs: dict[str, str] = {} + if continuation_token: + kwargs["ContinuationToken"] = continuation_token + ret = s3.list_objects_v2( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Prefix=f"release-artifacts/{salt_version}", + FetchOwner=False, + **kwargs, + ) + contents = ret.pop("Contents", None) + if contents is None: + break + for entry in contents: + entry_path = pathlib.Path(entry["Key"]) + if entry_path.name.startswith("."): + continue + release_artifacts_listing[entry_path] = entry["Size"] + if not ret["IsTruncated"]: + break + continuation_token = ret["NextContinuationToken"] + + for entry_path, size in release_artifacts_listing.items(): + ctx.info(f" * {entry_path.name}") + local_path = artifacts_path / entry_path.name + with local_path.open("wb") as wfh: + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Downloading...", total=size) + s3.download_fileobj( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Key=str(entry_path), + Fileobj=wfh, + Callback=tools.utils.UpdateProgress(progress, task), + ) + + for artifact in artifacts_path.iterdir(): + if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"): + continue + tools.utils.gpg_sign(ctx, key_id, artifact) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, artifacts_path) + + release_message = f"""\ + # Welcome to Salt v{salt_version} + + | :exclamation: ATTENTION | + 
|:-------------------------------------------------------------------------------------------------------------------------| + | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's version properly. | + | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`). + """ + release_message_path = artifacts_path / "gh-release-body.md" + release_message_path.write_text(textwrap.dedent(release_message).strip()) + + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Stop processing.") + ctx.exit(0) + + if TYPE_CHECKING: + assert github_output is not None + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") + + releases = get_salt_releases(ctx, repository) + if Version(salt_version) >= releases[-1]: + make_latest = True + else: + make_latest = False + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"make-latest={json.dumps(make_latest)}\n") + + artifacts_to_upload = [] + for artifact in artifacts_path.iterdir(): + if artifact.suffix == ".patch": + continue + if artifact.name == release_message_path.name: + continue + artifacts_to_upload.append(str(artifact.resolve())) + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n") + ctx.exit(0) + + +def _get_repo_detailed_file_list( + bucket_name: str, + bucket_folder: str = "", + glob_match: str = "**", +) -> list[dict[str, Any]]: + s3 = boto3.client("s3") + listing: list[dict[str, Any]] = [] + continuation_token = None + while True: + kwargs: dict[str, str] = {} + if continuation_token: + kwargs["ContinuationToken"] = continuation_token + ret = s3.list_objects_v2( + Bucket=bucket_name, + Prefix=bucket_folder, + FetchOwner=False, + **kwargs, + ) + contents = ret.pop("Contents", None) + if contents is None: + 
break + for entry in contents: + if fnmatch.fnmatch(entry["Key"], glob_match): + listing.append(entry) + if not ret["IsTruncated"]: + break + continuation_token = ret["NextContinuationToken"] + return listing + + +def _get_repo_file_list( + bucket_name: str, bucket_folder: str, glob_match: str +) -> list[str]: + return [ + entry["Key"] + for entry in _get_repo_detailed_file_list( + bucket_name, bucket_folder, glob_match=glob_match + ) + ] + + +def _publish_repo( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + nightly_build: bool = False, + stage: bool = False, +): + """ + Publish packaging repositories. + """ + if nightly_build: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + elif stage: + bucket_name = tools.utils.STAGING_BUCKET_NAME + else: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + + ctx.info("Preparing upload ...") + s3 = boto3.client("s3") + to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {} + to_upload_paths: list[pathlib.Path] = [] + symlink_paths: list[str] = [] + uploaded_files: list[str] = [] + for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): + for dirname in dirnames: + path = pathlib.Path(dirpath, dirname) + if not path.is_symlink(): + continue + # This is a symlink, then we need to delete all files under + # that directory in S3 because S3 does not understand symlinks + # and we would end up adding files to that folder instead of + # replacing it. 
+ try: + relpath = path.relative_to(repo_path) + ret = s3.list_objects( + Bucket=bucket_name, + Prefix=str(relpath), + ) + if "Contents" not in ret: + continue + objects = [] + for entry in ret["Contents"]: + objects.append({"Key": entry["Key"]}) + to_delete_paths[path] = objects + symlink_paths.append(str(relpath)) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + + for fpath in filenames: + path = pathlib.Path(dirpath, fpath) + to_upload_paths.append(path) + + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Deleting directories to override.", total=len(to_delete_paths) + ) + for base, objects in to_delete_paths.items(): + relpath = base.relative_to(repo_path) + bucket_uri = f"s3://{bucket_name}/{relpath}" + progress.update(task, description=f"Deleting {bucket_uri}") + try: + ret = s3.delete_objects( + Bucket=bucket_name, + Delete={"Objects": objects}, + ) + except ClientError: + log.exception(f"Failed to delete {bucket_uri}") + finally: + progress.update(task, advance=1) + + try: + ctx.info("Uploading repository ...") + for upload_path in to_upload_paths: + relpath = upload_path.relative_to(repo_path) + size = upload_path.stat().st_size + ctx.info(f" {relpath}") + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Uploading...", total=size) + s3.upload_file( + str(upload_path), + bucket_name, + str(relpath), + Callback=tools.utils.UpdateProgress(progress, task), + ExtraArgs={ + "Metadata": { + "x-amz-meta-salt-release-version": salt_version, + } + }, + ) + uploaded_files.append(str(relpath)) + if stage is True: + repo_files_path = f"release-artifacts/{salt_version}/.release-files.json" + ctx.info(f"Uploading {repo_files_path} ...") + s3.put_object( + Key=repo_files_path, + Bucket=bucket_name, + Body=json.dumps(uploaded_files).encode(), + Metadata={ + "x-amz-meta-salt-release-version": 
salt_version, + }, + ) + repo_symlinks_path = ( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + ctx.info(f"Uploading {repo_symlinks_path} ...") + s3.put_object( + Key=repo_symlinks_path, + Bucket=bucket_name, + Body=json.dumps(symlink_paths).encode(), + Metadata={ + "x-amz-meta-salt-release-version": salt_version, + }, + ) + except KeyboardInterrupt: + pass diff --git a/tools/utils.py b/tools/utils.py index cb4379c61e0..48dc4a17314 100644 --- a/tools/utils.py +++ b/tools/utils.py @@ -1,8 +1,12 @@ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations +import json import os import pathlib +import sys +from datetime import datetime +from typing import Any import packaging.version from ptscripts import Context @@ -16,6 +20,18 @@ from rich.progress import ( TransferSpeedColumn, ) +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023" SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod" @@ -116,6 +132,9 @@ class Version(packaging.version.Version): other = self.__class__(other) return super().__ne__(other) + def __str__(self): + return super().__str__().replace(".post", "-") + def __hash__(self): return hash(str(self)) @@ -142,9 +161,6 @@ def get_salt_releases(ctx: Context, repository: str) -> list[Version]: name = tag["name"] if name.startswith("v"): name = name[1:] - if "-" in name: - # We're not going to parse dash tags - continue if "docs" in name: # We're not going to consider doc tags continue @@ -161,11 +177,122 @@ def get_salt_releases(ctx: Context, repository: str) -> list[Version]: name = release["name"] if name.startswith("v"): name = name[1:] - if name and "-" not in 
name and "docs" not in name: - # We're not going to parse dash or docs releases + if name and "docs" not in name: + # We're not going to parse docs releases versions.add(Version(name)) name = release["tag_name"] - if "-" not in name and "docs" not in name: - # We're not going to parse dash or docs releases + if "docs" not in name: + # We're not going to parse docs releases versions.add(Version(name)) return sorted(versions) + + +def parse_versions(*versions: str) -> list[Version]: + _versions = [] + for version in set(versions): + if version == "latest": + continue + _versions.append(Version(version)) + if _versions: + _versions.sort(reverse=True) + return _versions + + +def get_repo_json_file_contents( + ctx: Context, + bucket_name: str, + repo_path: pathlib.Path, + repo_json_path: pathlib.Path, +) -> dict[str, Any]: + s3 = boto3.client("s3") + repo_json: dict[str, Any] = {} + try: + ret = s3.head_object( + Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path)) + ) + ctx.info( + f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file " + f"from bucket {bucket_name}" + ) + size = ret["ContentLength"] + with repo_json_path.open("wb") as wfh: + with create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Downloading...", total=size) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_json_path.relative_to(repo_path)), + Fileobj=wfh, + Callback=UpdateProgress(progress, task), + ) + with repo_json_path.open() as rfh: + repo_json = json.load(rfh) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}") + if repo_json: + ctx.print(repo_json, soft_wrap=True) + return repo_json + + +def create_top_level_repo_path( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + distro: str, + distro_version: str | None = None, # pylint: disable=bad-whitespace + 
distro_arch: str | None = None, # pylint: disable=bad-whitespace + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace +): + create_repo_path = repo_path + if nightly_build_from: + create_repo_path = ( + create_repo_path + / "salt-dev" + / nightly_build_from + / datetime.utcnow().strftime("%Y-%m-%d") + ) + create_repo_path.mkdir(exist_ok=True, parents=True) + with ctx.chdir(create_repo_path.parent): + latest_nightly_symlink = pathlib.Path("latest") + if not latest_nightly_symlink.exists(): + ctx.info( + f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." + ) + latest_nightly_symlink.symlink_to( + create_repo_path.name, target_is_directory=True + ) + elif "rc" in salt_version: + create_repo_path = create_repo_path / "salt_rc" + create_repo_path = create_repo_path / "salt" / "py3" / distro + if distro_version: + create_repo_path = create_repo_path / distro_version + if distro_arch: + create_repo_path = create_repo_path / distro_arch + create_repo_path.mkdir(exist_ok=True, parents=True) + return create_repo_path + + +def create_full_repo_path( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + distro: str, + distro_version: str | None = None, # pylint: disable=bad-whitespace + distro_arch: str | None = None, # pylint: disable=bad-whitespace + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace +): + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version, + distro_arch, + nightly_build_from=nightly_build_from, + ) + create_repo_path = create_repo_path / "minor" / salt_version + create_repo_path.mkdir(exist_ok=True, parents=True) + return create_repo_path diff --git a/tools/vm.py b/tools/vm.py index 944f2fe6cc2..f7b2837ae1b 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -720,41 +720,50 @@ class VM: client = boto3.client("ec2", region_name=self.region_name) # Let's search for the launch template corresponding to this AMI 
launch_template_name = None + next_token = "" try: - response = response = client.describe_launch_templates( - Filters=[ - { - "Name": "tag:spb:is-golden-image-template", - "Values": ["true"], - }, - { - "Name": "tag:spb:project", - "Values": ["salt-project"], - }, - { - "Name": "tag:spb:environment", - "Values": [environment], - }, - { - "Name": "tag:spb:image-id", - "Values": [self.config.ami], - }, - ] - ) - log.debug( - "Search for launch template response:\n%s", pprint.pformat(response) - ) - for details in response.get("LaunchTemplates"): - if launch_template_name is not None: - log.warning( - "Multiple launch templates for the same AMI. This is not " - "supposed to happen. Picked the first one listed: %s", - response, - ) - break - launch_template_name = details["LaunchTemplateName"] + while True: + response = response = client.describe_launch_templates( + Filters=[ + { + "Name": "tag:spb:is-golden-image-template", + "Values": ["true"], + }, + { + "Name": "tag:spb:project", + "Values": ["salt-project"], + }, + { + "Name": "tag:spb:environment", + "Values": [environment], + }, + { + "Name": "tag:spb:image-id", + "Values": [self.config.ami], + }, + ], + NextToken=next_token, + ) + log.debug( + "Search for launch template response:\n%s", + pprint.pformat(response), + ) + for details in response.get("LaunchTemplates"): + if launch_template_name is not None: + log.warning( + "Multiple launch templates for the same AMI. This is not " + "supposed to happen. 
Picked the first one listed: %s", + response, + ) + break + launch_template_name = details["LaunchTemplateName"] - if launch_template_name is None: + if launch_template_name is not None: + break + + next_token = response.get("NextToken") + if next_token: + continue self.ctx.error(f"Could not find a launch template for {self.name!r}") self.ctx.exit(1) except ClientError as exc: From 13b9bfdae891d4fb228367a834689c221115de0c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 16 Jul 2023 09:37:08 +0100 Subject: [PATCH 04/73] Default to self-hosted runners on private repo Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 4 ++-- .github/workflows/nightly.yml | 4 ++-- .github/workflows/scheduled.yml | 4 ++-- .github/workflows/staging.yml | 2 +- .github/workflows/templates/ci.yml.jinja | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b525d11a150..1bef64192e3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -260,7 +260,7 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: @@ -394,7 +394,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/nightly.yml 
b/.github/workflows/nightly.yml index 73876713240..a6ee739132a 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -305,7 +305,7 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: @@ -444,7 +444,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index f61782b8a93..98dfdeb4fa6 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -295,7 +295,7 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: @@ -429,7 +429,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ 
github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c30b9877473..1ff8da1a9b2 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -431,7 +431,7 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 30a82d331eb..405aefe53ee 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -52,7 +52,7 @@ - x86_64 <%- else %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} <%- endif %> needs: - prepare-workflow @@ -213,7 +213,7 @@ needs: - prepare-workflow - prepare-release - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: actions/checkout@v3 From 2b1554002cd34ba5363447e87916025ae22a1fb9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 14 Jul 2023 19:32:28 +0100 Subject: [PATCH 05/73] Add `actions: read` to see if we can fix the set pipeline exit status job Signed-off-by: Pedro Algarvio --- 
.github/workflows/ci.yml | 1 + .github/workflows/staging.yml | 1 + .github/workflows/templates/layout.yml.jinja | 3 +++ 3 files changed, 5 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1bef64192e3..96c8b435a75 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,6 +22,7 @@ env: permissions: contents: read # for dorny/paths-filter to fetch a list of changed files pull-requests: read # for dorny/paths-filter to read pull requests + actions: read # for technote-space/workflow-conclusion-action to get the job statuses concurrency: # Concurrency is defined in a way that concurrent builds against branches do diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 1ff8da1a9b2..3f775e1dba2 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -39,6 +39,7 @@ env: permissions: contents: read # for dorny/paths-filter to fetch a list of changed files pull-requests: read # for dorny/paths-filter to read pull requests + actions: read # for technote-space/workflow-conclusion-action to get the job statuses concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }} diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 96394731244..fa9bf24aaae 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -43,6 +43,9 @@ env: permissions: contents: read # for dorny/paths-filter to fetch a list of changed files pull-requests: read # for dorny/paths-filter to read pull requests +<%- if workflow_slug not in ("nightly", "scheduled") %> + actions: read # for technote-space/workflow-conclusion-action to get the job statuses +<%- endif %> <%- endblock permissions %> From bdb77bfee751f672249cb3673793ff75301d3740 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 18 Jul 2023 06:34:22 +0100 Subject: [PATCH 06/73] Bump to `pyyaml==6.0.1` 
due to https://github.com/yaml/pyyaml/issues/601 Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 16 ++++++++-------- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/docs.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/invoke.txt | 2 +- requirements/static/ci/py3.10/lint.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- .../static/ci/py3.10/pkgtests-windows.txt | 2 +- requirements/static/ci/py3.10/pkgtests.txt | 2 +- requirements/static/ci/py3.10/tools.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/tools.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/docs.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/invoke.txt | 2 +- requirements/static/ci/py3.7/lint.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/docs.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/invoke.txt | 2 +- requirements/static/ci/py3.8/lint.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/docs.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/invoke.txt | 2 +- requirements/static/ci/py3.9/lint.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/tools.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- requirements/static/pkg/py3.10/darwin.txt | 2 +- requirements/static/pkg/py3.10/freebsd.txt | 2 +- requirements/static/pkg/py3.10/linux.txt | 2 +- requirements/static/pkg/py3.10/windows.txt | 2 +- 
requirements/static/pkg/py3.7/freebsd.txt | 2 +- requirements/static/pkg/py3.7/linux.txt | 2 +- requirements/static/pkg/py3.7/windows.txt | 2 +- requirements/static/pkg/py3.8/freebsd.txt | 2 +- requirements/static/pkg/py3.8/linux.txt | 2 +- requirements/static/pkg/py3.8/windows.txt | 2 +- requirements/static/pkg/py3.9/darwin.txt | 2 +- requirements/static/pkg/py3.9/freebsd.txt | 2 +- requirements/static/pkg/py3.9/linux.txt | 2 +- requirements/static/pkg/py3.9/windows.txt | 2 +- 50 files changed, 57 insertions(+), 57 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dd68eb47bed..d1f05f479f4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,7 +56,7 @@ repos: - pre-commit-checks additional_dependencies: - boto3==1.21.46 - - pyyaml==6.0 + - pyyaml==6.0.1 - jinja2==3.1.2 - packaging==23.0 - virustotal3==1.0.8 @@ -70,7 +70,7 @@ repos: - generate-workflows additional_dependencies: - boto3==1.21.46 - - pyyaml==6.0 + - pyyaml==6.0.1 - jinja2==3.1.2 - packaging==23.0 - virustotal3==1.0.8 @@ -85,7 +85,7 @@ repos: - actionlint additional_dependencies: - boto3==1.21.46 - - pyyaml==6.0 + - pyyaml==6.0.1 - jinja2==3.1.2 - packaging==23.0 @@ -1156,7 +1156,7 @@ repos: - docs.check additional_dependencies: - blessings==1.7 - - pyyaml==6.0 + - pyyaml==6.0.1 - distro==1.7.0 - jinja2==3.0.3 - msgpack==1.0.3 @@ -1175,7 +1175,7 @@ repos: - filemap.check additional_dependencies: - blessings==1.7 - - pyyaml==6.0 + - pyyaml==6.0.1 - distro==1.7.0 - jinja2==3.0.3 - msgpack==1.0.3 @@ -1198,7 +1198,7 @@ repos: - loader.check-virtual additional_dependencies: - blessings==1.7 - - pyyaml==6.0 + - pyyaml==6.0.1 - distro==1.7.0 - jinja2==3.0.3 - msgpack==1.0.3 @@ -1221,7 +1221,7 @@ repos: - docstrings.check additional_dependencies: - blessings==1.7 - - pyyaml==6.0 + - pyyaml==6.0.1 - distro==1.7.0 - jinja2==3.0.3 - msgpack==1.0.3 @@ -1246,7 +1246,7 @@ repos: - --error-on-known-failures additional_dependencies: - blessings==1.7 - - pyyaml==6.0 
+ - pyyaml==6.0.1 - distro==1.7.0 - jinja2==3.0.3 - msgpack==1.0.3 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index ed2c9899611..21a48e0114b 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -750,7 +750,7 @@ pyvmomi==7.0.2 # via -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 25f965b961b..3daa014fe10 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -743,7 +743,7 @@ pytz==2022.1 # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 3ab9f5eab1a..42cc0af22f9 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -130,7 +130,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.10/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index dabffdb3686..39b1a42eafa 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -742,7 +742,7 @@ pytz==2022.1 # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.10/invoke.txt b/requirements/static/ci/py3.10/invoke.txt index 591666c8468..bcca631927a 100644 --- a/requirements/static/ci/py3.10/invoke.txt +++ b/requirements/static/ci/py3.10/invoke.txt @@ -8,7 
+8,7 @@ blessings==1.7 # via -r requirements/static/ci/invoke.in invoke==1.4.1 # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/static/ci/invoke.in six==1.16.0 # via blessings diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 9868bef0f40..555e96b0a58 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -715,7 +715,7 @@ pytz==2022.1 # twilio pyvmomi==7.0.2 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index ce83b819418..12fbc95d22e 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -768,7 +768,7 @@ pytz==2022.1 # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.10/pkgtests-windows.txt b/requirements/static/ci/py3.10/pkgtests-windows.txt index 2ba612f0a16..d78bb773a5e 100644 --- a/requirements/static/ci/py3.10/pkgtests-windows.txt +++ b/requirements/static/ci/py3.10/pkgtests-windows.txt @@ -136,7 +136,7 @@ pywin32==305 # via # pytest-skip-markers # wmi -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via diff --git a/requirements/static/ci/py3.10/pkgtests.txt b/requirements/static/ci/py3.10/pkgtests.txt index 4d48aa6cee5..7d795f16f38 100644 --- a/requirements/static/ci/py3.10/pkgtests.txt +++ b/requirements/static/ci/py3.10/pkgtests.txt @@ -123,7 +123,7 @@ pytest==7.2.1 # pytest-tempdir pytz==2022.7.1 # via tempora -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 # via diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index af55d67560f..8fcddf7da97 100644 
--- a/requirements/static/ci/py3.10/tools.txt +++ b/requirements/static/ci/py3.10/tools.txt @@ -38,7 +38,7 @@ python-dateutil==2.8.2 # via botocore python-tools-scripts==0.12.0 # via -r requirements/static/ci/tools.in -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/static/ci/tools.in requests==2.28.2 # via diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 3f1db8377e8..950ea4f0801 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -310,7 +310,7 @@ pywin32==305 # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.11/tools.txt b/requirements/static/ci/py3.11/tools.txt index bdd05d1f2ee..556718941f1 100644 --- a/requirements/static/ci/py3.11/tools.txt +++ b/requirements/static/ci/py3.11/tools.txt @@ -38,7 +38,7 @@ python-dateutil==2.8.2 # via botocore python-tools-scripts==0.12.0 # via -r requirements/static/ci/tools.in -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/static/ci/tools.in requests==2.31.0 # via diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 4cf6301ebb4..c939092dfb9 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -794,7 +794,7 @@ pyvmomi==7.0.2 # via -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index 78983020144..874b37d15cb 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -138,7 +138,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.7/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.7/linux.txt # -r 
requirements/base.txt diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 573134b8435..29c43fdec4d 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -780,7 +780,7 @@ pytz==2022.1 # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.7/invoke.txt b/requirements/static/ci/py3.7/invoke.txt index e2cad5c72e0..669f7a7fce8 100644 --- a/requirements/static/ci/py3.7/invoke.txt +++ b/requirements/static/ci/py3.7/invoke.txt @@ -8,7 +8,7 @@ blessings==1.7 # via -r requirements/static/ci/invoke.in invoke==1.4.1 # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/static/ci/invoke.in six==1.16.0 # via blessings diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index 64f47a07f5b..333adceda2d 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -760,7 +760,7 @@ pytz==2022.1 # twilio pyvmomi==7.0.2 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index ff0ca6be36f..7fa2abbabad 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -808,7 +808,7 @@ pytz==2022.1 # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index b43b83d8678..275fb9ae3a3 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -324,7 +324,7 @@ pywin32==305 # wmi pywinrm==0.4.1 # via -r 
requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 1a7eff61340..8f2d9d31aaf 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -783,7 +783,7 @@ pyvmomi==7.0.2 # via -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index fb8348f70bc..8a7e9a974c5 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -130,7 +130,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.8/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 71ded2f0ecb..a2f2a783226 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -770,7 +770,7 @@ pytz==2022.1 # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.8/invoke.txt b/requirements/static/ci/py3.8/invoke.txt index 356dd421308..d3850c6bde7 100644 --- a/requirements/static/ci/py3.8/invoke.txt +++ b/requirements/static/ci/py3.8/invoke.txt @@ -8,7 +8,7 @@ blessings==1.7 # via -r requirements/static/ci/invoke.in invoke==1.4.1 # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/static/ci/invoke.in six==1.16.0 # via blessings diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 425922348d5..c31ee52d024 100644 --- 
a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -751,7 +751,7 @@ pytz==2022.1 # twilio pyvmomi==7.0.2 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index dff8d779093..977d5546bff 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -796,7 +796,7 @@ pytz==2022.1 # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 4dca1d45370..2b80325df07 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -312,7 +312,7 @@ pywin32==305 # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 5e044b7ac97..999f82d93c1 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -786,7 +786,7 @@ pyvmomi==7.0.2 # via -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 59dc17fefc3..fe14545c269 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -774,7 +774,7 @@ pytz==2022.1 # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.9/docs.txt 
b/requirements/static/ci/py3.9/docs.txt index b8f13d0cb66..aeba29cca9c 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -134,7 +134,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.9/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 950f0afd887..f0ccb1afe42 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -773,7 +773,7 @@ pytz==2022.1 # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/ci/py3.9/invoke.txt b/requirements/static/ci/py3.9/invoke.txt index 3e1879c97e1..e9e8bf56b12 100644 --- a/requirements/static/ci/py3.9/invoke.txt +++ b/requirements/static/ci/py3.9/invoke.txt @@ -8,7 +8,7 @@ blessings==1.7 # via -r requirements/static/ci/invoke.in invoke==1.4.1 # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/static/ci/invoke.in six==1.16.0 # via blessings diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 2cc6daf69c6..d5d6d93d168 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -752,7 +752,7 @@ pytz==2022.1 # twilio pyvmomi==7.0.2 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 3461c0c3808..bbe14b08986 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -801,7 +801,7 @@ pytz==2022.1 # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # 
-r requirements/base.txt # ansible-core diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index 149f2576546..75862497bb6 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -38,7 +38,7 @@ python-dateutil==2.8.2 # via botocore python-tools-scripts==0.12.0 # via -r requirements/static/ci/tools.in -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/static/ci/tools.in requests==2.28.2 # via diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index e70f94b0c9b..2ace2a961ee 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -313,7 +313,7 @@ pywin32==305 # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -r requirements/base.txt # clustershell diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index c00d0979971..8ca6ea4a1e3 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -89,7 +89,7 @@ python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index 0c1a98b17f1..8e73e8ab0a3 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -77,7 +77,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 50a863a604c..5995cc1f3c8 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ 
b/requirements/static/pkg/py3.10/linux.txt @@ -77,7 +77,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 6c8a45998a3..e0e09ed37d3 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -104,7 +104,7 @@ pywin32==305 # via # -r requirements/windows.txt # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index 06b8baff901..e335093df11 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -75,7 +75,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index 6f5f4715f1a..5317ce105a4 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -75,7 +75,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index 21965d5b388..a0a099db70d 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -105,7 +105,7 @@ pywin32==305 # -r requirements/windows.txt # cherrypy # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == 
"win32" # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index f0f9cb52c14..657756f6e40 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -75,7 +75,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index 01da74d3da3..f62162cd061 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -75,7 +75,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 33734387c70..74cfc515a33 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -105,7 +105,7 @@ pywin32==305 # -r requirements/windows.txt # cherrypy # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 58c783cfb10..9b26afea3f8 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -89,7 +89,7 @@ python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 2aa66565594..840a728237f 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt 
+++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -77,7 +77,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 704ced76aea..f6a7eced50b 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -77,7 +77,7 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index cb76bdf0b3d..0f727304f31 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -105,7 +105,7 @@ pywin32==305 # -r requirements/windows.txt # cherrypy # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt From 7769109f5ff7deabc45124de9f2d415a4aefcbb3 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 25 May 2023 06:56:46 +0100 Subject: [PATCH 07/73] Fix lint issues after the `pyyaml` package upgrade Signed-off-by: Pedro Algarvio --- salt/modules/win_iis.py | 5 ++--- tests/integration/utils/test_win_runas.py | 12 ++++++------ 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/salt/modules/win_iis.py b/salt/modules/win_iis.py index 0c97aa84896..42ec335bf32 100644 --- a/salt/modules/win_iis.py +++ b/salt/modules/win_iis.py @@ -13,10 +13,9 @@ import logging import os import re -import yaml - import salt.utils.json import salt.utils.platform +import salt.utils.yaml from salt.exceptions import CommandExecutionError, SaltInvocationError log = logging.getLogger(__name__) @@ -187,7 +186,7 @@ def _prepare_settings(pspath, 
settings): match = re.search(r"Collection\[(\{.*\})\]", setting["name"]) if match: name = setting["name"][: match.start(1) - 1] - match_dict = yaml.load(match.group(1)) + match_dict = salt.utils.yaml.load(match.group(1)) index = _collection_match_to_index( pspath, setting["filter"], name, match_dict ) diff --git a/tests/integration/utils/test_win_runas.py b/tests/integration/utils/test_win_runas.py index cd8c95b9da5..41d4169d945 100644 --- a/tests/integration/utils/test_win_runas.py +++ b/tests/integration/utils/test_win_runas.py @@ -10,10 +10,10 @@ import time import traceback import pytest -import yaml import salt.utils.files import salt.utils.win_runas +import salt.utils.yaml from tests.support.case import ModuleCase from tests.support.helpers import with_system_user from tests.support.mock import Mock @@ -658,7 +658,7 @@ class RunAsTest(ModuleCase): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 1, ret @with_system_user( @@ -676,7 +676,7 @@ class RunAsTest(ModuleCase): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 1, ret @with_system_user( @@ -698,7 +698,7 @@ class RunAsTest(ModuleCase): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 0, ret @with_system_user( @@ -720,7 +720,7 @@ class RunAsTest(ModuleCase): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 0, ret def test_runas_service_system_user(self): 
@@ -735,5 +735,5 @@ class RunAsTest(ModuleCase): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 0, ret From 2d5c36c125601050ef24cc3d3a97774fc87d7488 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 19:19:37 +0100 Subject: [PATCH 08/73] Skip `tests/unit/{modules,states}/test_zcbuildout.py` on windows. It needs special work on the golden images to get SSL to properly work. These steps are required because the code being tested is using `easy_install` which does not know how to get certificates from `certifi`. Since `easy_install` is too old, and deprecated, the extra work is not worth it, plus, they are still being tested on other platforms. Signed-off-by: Pedro Algarvio --- tests/unit/modules/test_zcbuildout.py | 9 +++++++++ tests/unit/states/test_zcbuildout.py | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/tests/unit/modules/test_zcbuildout.py b/tests/unit/modules/test_zcbuildout.py index f793e3fc3f8..ac98435ffa0 100644 --- a/tests/unit/modules/test_zcbuildout.py +++ b/tests/unit/modules/test_zcbuildout.py @@ -19,6 +19,15 @@ from tests.support.mixins import LoaderModuleMockMixin from tests.support.runtests import RUNTIME_VARS from tests.support.unit import TestCase +pytestmark = [ + pytest.mark.skip_on_windows( + reason=( + "Special steps are required for proper SSL validation because " + "`easy_install` is too old(and deprecated)." 
+ ) + ) +] + KNOWN_VIRTUALENV_BINARY_NAMES = ( "virtualenv", "virtualenv2", diff --git a/tests/unit/states/test_zcbuildout.py b/tests/unit/states/test_zcbuildout.py index db6013076d1..b5f919ac6b2 100644 --- a/tests/unit/states/test_zcbuildout.py +++ b/tests/unit/states/test_zcbuildout.py @@ -10,6 +10,15 @@ import salt.utils.path from tests.support.runtests import RUNTIME_VARS from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base +pytestmark = [ + pytest.mark.skip_on_windows( + reason=( + "Special steps are required for proper SSL validation because " + "`easy_install` is too old(and deprecated)." + ) + ) +] + @pytest.mark.skip_if_binaries_missing(*KNOWN_VIRTUALENV_BINARY_NAMES, check_all=False) @pytest.mark.requires_network From f82860b8ad3ee786762fa02fa1a6eaf6e24dc8d4 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Fri, 26 May 2023 17:01:20 -0700 Subject: [PATCH 09/73] Do not fail on bad message pack message --- salt/transport/zeromq.py | 6 ++++- tests/pytests/unit/transport/test_zeromq.py | 30 +++++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index 3ec7f7726c4..7cc6b9987f5 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -428,7 +428,11 @@ class RequestServer(salt.transport.base.DaemonizedRequestServer): @salt.ext.tornado.gen.coroutine def handle_message(self, stream, payload): - payload = self.decode_payload(payload) + try: + payload = self.decode_payload(payload) + except salt.exceptions.SaltDeserializationError: + self.stream.send(self.encode_payload({"msg": "bad load"})) + return # XXX: Is header really needed? 
reply = yield self.message_handler(payload) self.stream.send(self.encode_payload(reply)) diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 10bb4917b83..73316026e4a 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -11,6 +11,7 @@ import threading import time import uuid +import msgpack import pytest import salt.channel.client @@ -1404,3 +1405,32 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop): assert "sig" in ret ret = client.auth.handle_signin_response(signin_payload, ret) assert ret == "retry" + + +async def test_req_server_garbage_request(io_loop): + """ + Validate invalid msgpack messages will not raise exceptions in the + RequestServers's message handler. + """ + opts = salt.config.master_config("") + request_server = salt.transport.zeromq.RequestServer(opts) + + def message_handler(payload): + return payload + + request_server.post_fork(message_handler, io_loop) + + byts = msgpack.dumps({"foo": "bar"}) + badbyts = byts[:3] + b"^M" + byts[3:] + + valid_response = msgpack.dumps("Invalid payload") + + with MagicMock() as stream: + request_server.stream = stream + + try: + await request_server.handle_message(stream, badbyts) + except Exception as exc: # pylint: disable=broad-except + pytest.fail("Exception was raised {}".format(exc)) + + request_server.stream.send.assert_called_once_with(valid_response) From d13954e6639734c2ada3accc1ed6d5cd9667de86 Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Wed, 19 Jul 2023 17:40:51 +0000 Subject: [PATCH 10/73] fix test and add changeog --- changelog/64370.security.md | 1 + tests/pytests/unit/transport/test_zeromq.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog/64370.security.md diff --git a/changelog/64370.security.md b/changelog/64370.security.md new file mode 100644 index 00000000000..4b6171e5d69 --- /dev/null +++ 
b/changelog/64370.security.md @@ -0,0 +1 @@ +fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 73316026e4a..40ff4bc1b05 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -1423,7 +1423,7 @@ async def test_req_server_garbage_request(io_loop): byts = msgpack.dumps({"foo": "bar"}) badbyts = byts[:3] + b"^M" + byts[3:] - valid_response = msgpack.dumps("Invalid payload") + valid_response = msgpack.dumps({"msg": "bad load"}) with MagicMock() as stream: request_server.stream = stream From b3952f8d7cc3ddad2947aa49b9b6644db7acf0d0 Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Wed, 19 Jul 2023 19:03:48 +0000 Subject: [PATCH 11/73] MagicMock is not a context manager. with needs __enter__ and __exit__, but we dont for this test --- tests/pytests/unit/transport/test_zeromq.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 40ff4bc1b05..97e1669a4c6 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -1425,12 +1425,12 @@ async def test_req_server_garbage_request(io_loop): valid_response = msgpack.dumps({"msg": "bad load"}) - with MagicMock() as stream: - request_server.stream = stream + stream = MagicMock() + request_server.stream = stream - try: - await request_server.handle_message(stream, badbyts) - except Exception as exc: # pylint: disable=broad-except - pytest.fail("Exception was raised {}".format(exc)) + try: + await request_server.handle_message(stream, badbyts) + except Exception as exc: # pylint: disable=broad-except + pytest.fail("Exception was raised {}".format(exc)) request_server.stream.send.assert_called_once_with(valid_response) From c069d99ea8a921c9c3fe527613a1de75166a65d1 
Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Fri, 14 Jul 2023 09:06:11 -0600 Subject: [PATCH 12/73] [3006.2] Update cryptography --- changelog/64595.security.md | 11 +++++++++++ requirements/darwin.txt | 4 ++-- requirements/static/ci/py3.10/cloud.txt | 4 ++-- requirements/static/ci/py3.10/darwin.txt | 4 ++-- requirements/static/ci/py3.10/freebsd.txt | 5 +++-- requirements/static/ci/py3.10/lint.txt | 4 ++-- requirements/static/ci/py3.10/linux.txt | 4 ++-- requirements/static/ci/py3.10/windows.txt | 4 ++-- requirements/static/ci/py3.7/cloud.txt | 4 ++-- requirements/static/ci/py3.7/freebsd.txt | 5 +++-- requirements/static/ci/py3.7/lint.txt | 4 ++-- requirements/static/ci/py3.7/linux.txt | 4 ++-- requirements/static/ci/py3.7/windows.txt | 4 ++-- requirements/static/ci/py3.8/cloud.txt | 4 ++-- requirements/static/ci/py3.8/freebsd.txt | 5 +++-- requirements/static/ci/py3.8/lint.txt | 4 ++-- requirements/static/ci/py3.8/linux.txt | 4 ++-- requirements/static/ci/py3.8/windows.txt | 4 ++-- requirements/static/ci/py3.9/cloud.txt | 4 ++-- requirements/static/ci/py3.9/darwin.txt | 4 ++-- requirements/static/ci/py3.9/freebsd.txt | 5 +++-- requirements/static/ci/py3.9/lint.txt | 4 ++-- requirements/static/ci/py3.9/linux.txt | 4 ++-- requirements/static/ci/py3.9/windows.txt | 4 ++-- requirements/static/pkg/freebsd.in | 3 ++- requirements/static/pkg/linux.in | 4 ++-- requirements/static/pkg/py3.10/darwin.txt | 4 ++-- requirements/static/pkg/py3.10/freebsd.txt | 8 +++++--- requirements/static/pkg/py3.10/linux.txt | 4 ++-- requirements/static/pkg/py3.10/windows.txt | 4 ++-- requirements/static/pkg/py3.7/freebsd.txt | 8 +++++--- requirements/static/pkg/py3.7/linux.txt | 4 ++-- requirements/static/pkg/py3.7/windows.txt | 4 ++-- requirements/static/pkg/py3.8/freebsd.txt | 8 +++++--- requirements/static/pkg/py3.8/linux.txt | 4 ++-- requirements/static/pkg/py3.8/windows.txt | 4 ++-- requirements/static/pkg/py3.9/darwin.txt | 4 ++-- requirements/static/pkg/py3.9/freebsd.txt | 8 
+++++--- requirements/static/pkg/py3.9/linux.txt | 4 ++-- requirements/static/pkg/py3.9/windows.txt | 4 ++-- requirements/windows.txt | 4 ++-- 41 files changed, 107 insertions(+), 83 deletions(-) create mode 100644 changelog/64595.security.md diff --git a/changelog/64595.security.md b/changelog/64595.security.md new file mode 100644 index 00000000000..737603b7704 --- /dev/null +++ b/changelog/64595.security.md @@ -0,0 +1,11 @@ +Upgrade to `cryptography==41.0.2`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-5cpq-8wj7-hf2v) + +This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. + +Also resolves the following cryptography advisories: + +Due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + +There is no security upgrade available for Py3.5 diff --git a/requirements/darwin.txt b/requirements/darwin.txt index be8a547ae8e..9edba2a9cd9 100644 --- a/requirements/darwin.txt +++ b/requirements/darwin.txt @@ -5,13 +5,13 @@ apache-libcloud>=2.4.0 backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' cherrypy>=17.4.1 -cryptography>=39.0.1 +cryptography>=41.0.2 gitpython>=3.1.30; python_version >= '3.7' idna>=2.8 linode-python>=1.1.1 pyasn1>=0.4.8 pycparser>=2.21 -pyopenssl>=19.0.0 +pyopenssl>=23.2.0 python-dateutil>=2.8.0 python-gnupg>=0.4.4 setproctitle>=1.2.3 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 21a48e0114b..0a5fc41eb86 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -385,7 +385,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -669,7 +669,7 @@ pyjwt==2.4.0 
# via adal pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 3daa014fe10..7103b45b1aa 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -386,7 +386,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/darwin.txt # adal @@ -670,7 +670,7 @@ pyjwt==2.4.0 # via adal pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/darwin.txt # etcd3-py diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 39b1a42eafa..c5c99cda87b 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -384,8 +384,9 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via + # -r requirements/static/pkg/freebsd.in # adal # azure-cosmosdb-table # azure-keyvault @@ -669,7 +670,7 @@ pyjwt==2.4.0 # via adal pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/freebsd.in # etcd3-py diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 555e96b0a58..b152b352f3d 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -390,7 +390,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -675,7 +675,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r 
requirements/static/ci/linux.in pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 12fbc95d22e..1b81e68fa16 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -398,7 +398,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -689,7 +689,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 950ea4f0801..c7736afe27d 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -71,7 +71,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # etcd3-py @@ -237,7 +237,7 @@ pymssql==2.2.7 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index c939092dfb9..54e60b5f31a 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -392,7 +392,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -711,7 +711,7 @@ pyjwt==2.4.0 # via adal pynacl==1.4.0 
# via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 29c43fdec4d..a2540a91d62 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -391,8 +391,9 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via + # -r requirements/static/pkg/freebsd.in # adal # azure-cosmosdb-table # azure-keyvault @@ -705,7 +706,7 @@ pyjwt==2.4.0 # via adal pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/freebsd.in # etcd3-py diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index 333adceda2d..3c462854de6 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -399,7 +399,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -718,7 +718,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 7fa2abbabad..6be7aa6c23e 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -405,7 +405,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -725,7 +725,7 @@ pymysql==1.0.2 ; 
python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index 275fb9ae3a3..012380b7f01 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -77,7 +77,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # etcd3-py @@ -250,7 +250,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 8f2d9d31aaf..3b902826f8f 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -390,7 +390,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -700,7 +700,7 @@ pyjwt==2.4.0 # via adal pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index a2f2a783226..8a8f1413d13 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -389,8 +389,9 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via + # -r requirements/static/pkg/freebsd.in # adal # azure-cosmosdb-table # azure-keyvault @@ -695,7 +696,7 @@ pyjwt==2.4.0 # via 
adal pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/freebsd.in # etcd3-py diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index c31ee52d024..91fe6e4c095 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -397,7 +397,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -709,7 +709,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 977d5546bff..500d256ec0a 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -403,7 +403,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -715,7 +715,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 2b80325df07..b3dbeb74b09 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -73,7 +73,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # etcd3-py @@ -238,7 +238,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt 
pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 999f82d93c1..6fbd3af1e6e 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -390,7 +390,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -703,7 +703,7 @@ pyjwt==2.4.0 # via adal pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index fe14545c269..afd6f276d91 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -391,7 +391,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/darwin.txt # adal @@ -699,7 +699,7 @@ pyjwt==2.4.0 # via adal pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/darwin.txt # etcd3-py diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index f0ccb1afe42..4d4c68692f1 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -389,8 +389,9 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via + # -r requirements/static/pkg/freebsd.in # adal # azure-cosmosdb-table # azure-keyvault @@ -698,7 +699,7 @@ pyjwt==2.4.0 # via adal pynacl==1.3.0 # via paramiko 
-pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/freebsd.in # etcd3-py diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index d5d6d93d168..80fef45547a 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -395,7 +395,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -710,7 +710,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.4.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index bbe14b08986..aba6918f419 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -405,7 +405,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # adal @@ -720,7 +720,7 @@ pymysql==1.0.2 ; python_version > "3.5" # via -r requirements/static/ci/linux.in pynacl==1.3.0 # via paramiko -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/static/pkg/linux.in # etcd3-py diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 2ace2a961ee..7efff7ade82 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -73,7 +73,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # etcd3-py @@ -239,7 +239,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r 
requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/pkg/freebsd.in b/requirements/static/pkg/freebsd.in index 87c87c99e39..453c2c3633c 100644 --- a/requirements/static/pkg/freebsd.in +++ b/requirements/static/pkg/freebsd.in @@ -1,9 +1,10 @@ # This file only exists to trigger the right static compiled requirements destination # Any non hard dependencies of Salt for FreeBSD can go here cherrypy +cryptography>=41.0.2 backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' pycparser>=2.21; python_version >= '3.9' -pyopenssl>=19.0.0 +pyopenssl>=23.2.0 python-dateutil>=2.8.0 python-gnupg>=0.4.4 setproctitle>=1.2.3 diff --git a/requirements/static/pkg/linux.in b/requirements/static/pkg/linux.in index 3555af3b9c3..fb3f0a8e154 100644 --- a/requirements/static/pkg/linux.in +++ b/requirements/static/pkg/linux.in @@ -3,11 +3,11 @@ cherrypy backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' pycparser>=2.21; python_version >= '3.9' -pyopenssl>=19.0.0 +pyopenssl>=23.2.0 python-dateutil>=2.8.0 python-gnupg>=0.4.4 rpm-vercmp setproctitle>=1.2.3 timelib>=0.2.5 importlib-metadata>=3.3.0 -cryptography>=39.0.1 +cryptography>=41.0.2 diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index 8ca6ea4a1e3..daf832bbb38 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -18,7 +18,7 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/darwin.txt # pyopenssl @@ -81,7 +81,7 @@ pycparser==2.21 # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 # via -r requirements/darwin.txt diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt 
index 8e73e8ab0a3..d23b7f92d80 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -16,8 +16,10 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 - # via pyopenssl +cryptography==41.0.2 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt @@ -69,7 +71,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 5995cc1f3c8..bba6c238e71 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # pyopenssl @@ -69,7 +69,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index e0e09ed37d3..04ec542cbe6 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # pyopenssl @@ -90,7 +90,7 @@ pymssql==2.2.7 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 
+pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index e335093df11..2d977066b3d 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -16,8 +16,10 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 - # via pyopenssl +cryptography==41.0.2 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt @@ -67,7 +69,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index 5317ce105a4..624cc1b8f67 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # pyopenssl @@ -67,7 +67,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index a0a099db70d..8169d3d689b 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt 
# pyopenssl @@ -90,7 +90,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index 657756f6e40..98f692b5e32 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -16,8 +16,10 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 - # via pyopenssl +cryptography==41.0.2 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt @@ -67,7 +69,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index f62162cd061..135bf3051a3 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # pyopenssl @@ -67,7 +67,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 74cfc515a33..3d07e6c272d 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -23,7 +23,7 @@ 
clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # pyopenssl @@ -90,7 +90,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 9b26afea3f8..a0eb9b18cdf 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -18,7 +18,7 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/darwin.txt # pyopenssl @@ -81,7 +81,7 @@ pycparser==2.21 # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 # via -r requirements/darwin.txt diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 840a728237f..712c3273927 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -16,8 +16,10 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 - # via pyopenssl +cryptography==41.0.2 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt @@ -69,7 +71,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 
f6a7eced50b..dc3dbc1a14b 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/static/pkg/linux.in # pyopenssl @@ -69,7 +69,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index 0f727304f31..dc6a8f55262 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==39.0.2 +cryptography==41.0.2 # via # -r requirements/windows.txt # pyopenssl @@ -90,7 +90,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==23.0.0 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt diff --git a/requirements/windows.txt b/requirements/windows.txt index dacdbd1279a..77efe2ca55b 100644 --- a/requirements/windows.txt +++ b/requirements/windows.txt @@ -10,7 +10,7 @@ backports.ssl-match-hostname>=3.7.0.1; python_version < '3.7' certifi>=2022.12.07 cffi>=1.14.5 cherrypy>=18.6.1 -cryptography>=39.0.1 +cryptography>=41.0.2 gitpython>=3.1.30; python_version >= '3.7' ioloop>=0.1a0 lxml>=4.6.3 @@ -18,7 +18,7 @@ pyasn1>=0.4.8 pycparser>=2.21 pymssql>=2.2.1 pymysql>=1.0.2 -pyopenssl>=20.0.1 +pyopenssl>=23.2.0 python-dateutil>=2.8.1 python-gnupg>=0.4.7 requests>=2.25.1 From a7ffe5a62611d4e98da1bc7768d9b4f5e07c7e23 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 19 Jul 2023 16:01:36 +0100 
Subject: [PATCH 13/73] Check releases on both the public and private repositories Signed-off-by: Pedro Algarvio --- .github/workflows/release.yml | 3 +++ .github/workflows/staging.yml | 3 +++ .github/workflows/templates/layout.yml.jinja | 4 ++++ .github/workflows/templates/release.yml.jinja | 3 +++ 4 files changed, 13 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 28ec5fa1366..a301591ed60 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -87,6 +87,9 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi - name: Check Release Staged env: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 3f775e1dba2..a2d1bef32b1 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -192,6 +192,9 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi - name: Write Changed Files To A Local File run: diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index fa9bf24aaae..ca208437990 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -215,6 +215,10 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ 
steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi + <%- endif %> diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 47f02f80f71..d0068105403 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -115,6 +115,9 @@ permissions: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi - name: Check Release Staged env: From 988fb7ef8524ee3e4c65579d7494808d0c8b8f1b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 19 Jul 2023 22:02:09 +0100 Subject: [PATCH 14/73] Switch to `salt-ci-containers` Signed-off-by: Pedro Algarvio --- .github/workflows/lint-action.yml | 25 +++++++++++-------- .github/workflows/pre-commit-action.yml | 12 +++++---- .github/workflows/release-tag.yml | 2 +- .github/workflows/release-update-winrepo.yml | 2 +- .github/workflows/test-action-macos.yml | 4 +-- .../test-package-downloads-action-macos.yml | 2 +- .../workflows/test-packages-action-macos.yml | 4 +-- 7 files changed, 29 insertions(+), 22 deletions(-) diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml index 6e0df43e4d0..31a43701169 100644 --- a/.github/workflows/lint-action.yml +++ b/.github/workflows/lint-action.yml @@ -18,19 +18,21 @@ env: jobs: Salt: name: Lint Salt's Source Code - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} if: ${{ 
contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }} container: - image: python:3.8-slim-buster + image: ghcr.io/saltstack/salt-ci-containers/python:3.8 steps: - name: Install System Deps run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports + apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + + - name: Add Git Safe Directory + run: | + git config --global --add safe.directory "$(pwd)" - uses: actions/checkout@v3 @@ -60,19 +62,22 @@ jobs: Tests: name: Lint Salt's Test Suite - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }} container: - image: python:3.8-slim-buster + image: ghcr.io/saltstack/salt-ci-containers/python:3.8 steps: - name: Install System Deps run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list + echo "deb http://deb.debian.org/debian bookworm-backports main" >> /etc/apt/sources.list apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports + apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + + - name: 
Add Git Safe Directory + run: | + git config --global --add safe.directory "$(pwd)" - uses: actions/checkout@v3 diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml index 41a3091619f..fbafa2c4b47 100644 --- a/.github/workflows/pre-commit-action.yml +++ b/.github/workflows/pre-commit-action.yml @@ -21,19 +21,21 @@ jobs: Pre-Commit: name: Run Pre-Commit Against Salt - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} container: - image: python:3.10-slim-buster + image: ghcr.io/saltstack/salt-ci-containers/python:3.10 steps: - name: Install System Deps run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list apt-get update - apt-get install -y wget curl enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports + apt-get install -y wget curl enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + + - name: Add Git Safe Directory + run: | + git config --global --add safe.directory "$(pwd)" - uses: actions/checkout@v3 - uses: ./.github/actions/setup-actionlint diff --git a/.github/workflows/release-tag.yml b/.github/workflows/release-tag.yml index b2ec9913fc6..f1bc68d3fff 100644 --- a/.github/workflows/release-tag.yml +++ b/.github/workflows/release-tag.yml @@ -31,7 +31,7 @@ jobs: permissions: contents: write # for dev-drprasad/delete-tag-and-release to delete tags or releases name: Generate Tag and Github Release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: dev-drprasad/delete-tag-and-release@v0.2.0 if: github.event.inputs.reTag == 'true' diff --git 
a/.github/workflows/release-update-winrepo.yml b/.github/workflows/release-update-winrepo.yml index 91ce9df6f73..88fbd71773a 100644 --- a/.github/workflows/release-update-winrepo.yml +++ b/.github/workflows/release-update-winrepo.yml @@ -19,7 +19,7 @@ permissions: jobs: update-winrepo: name: Update Winrepo - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - name: Checkout Salt diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index fdede59b807..d8b3df5cc03 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -64,7 +64,7 @@ jobs: generate-matrix: name: Generate Test Matrix - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} transport-matrix-include: ${{ steps.generate-transport-matrix.outputs.matrix }} @@ -445,7 +445,7 @@ jobs: report: name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped' - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - generate-matrix - test diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index e24ffbeed8e..1351469f64f 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -259,7 +259,7 @@ jobs: report: name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && 
fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index b517c53745c..757baec178d 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -70,7 +70,7 @@ jobs: generate-matrix: name: Generate Package Test Matrix - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} steps: @@ -275,7 +275,7 @@ jobs: report: name: Reports for ${{ inputs.distro-slug }}(${{ matrix.test-chunk }}) - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: - test From c3bc185be53a1ea7943f255b1b2ad96ab01d8af8 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 20 Jul 2023 14:52:53 +0100 Subject: [PATCH 15/73] Use the `large` GH runners for linting Signed-off-by: Pedro Algarvio --- .github/workflows/lint-action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml index 31a43701169..a19b9d3088e 100644 --- a/.github/workflows/lint-action.yml +++ b/.github/workflows/lint-action.yml @@ -18,7 +18,7 @@ env: jobs: Salt: name: Lint Salt's Source Code - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "large",
"x86_64"]') || 'ubuntu-latest' }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }} container: @@ -62,7 +62,7 @@ jobs: Tests: name: Lint Salt's Test Suite - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "large", "x86_64"]') || 'ubuntu-latest' }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }} container: From 18c7a1f8f14fd5c51190c8048931e340e074c94a Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Fri, 14 Jul 2023 08:40:53 -0600 Subject: [PATCH 16/73] [3006.2] update requests --- changelog/64336.security.md | 4 ++++ requirements/static/ci/py3.10/cloud.txt | 8 ++++---- requirements/static/ci/py3.10/darwin.txt | 8 ++++---- requirements/static/ci/py3.10/docs.txt | 4 ++-- requirements/static/ci/py3.10/freebsd.txt | 8 ++++---- requirements/static/ci/py3.10/lint.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 8 ++++---- requirements/static/ci/py3.10/pkgtests-windows.txt | 2 +- requirements/static/ci/py3.10/pkgtests.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 8 ++++---- requirements/static/ci/py3.7/cloud.txt | 8 ++++---- requirements/static/ci/py3.7/docs.txt | 4 ++-- requirements/static/ci/py3.7/freebsd.txt | 8 ++++---- requirements/static/ci/py3.7/lint.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 8 ++++---- requirements/static/ci/py3.7/windows.txt | 8 ++++---- requirements/static/ci/py3.8/cloud.txt | 8 ++++---- requirements/static/ci/py3.8/docs.txt | 4 ++-- requirements/static/ci/py3.8/freebsd.txt | 8 ++++---- requirements/static/ci/py3.8/lint.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 8 ++++---- 
requirements/static/ci/py3.8/windows.txt | 8 ++++---- requirements/static/ci/py3.9/cloud.txt | 8 ++++---- requirements/static/ci/py3.9/darwin.txt | 8 ++++---- requirements/static/ci/py3.9/docs.txt | 4 ++-- requirements/static/ci/py3.9/freebsd.txt | 8 ++++---- requirements/static/ci/py3.9/lint.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 8 ++++---- requirements/static/ci/py3.9/windows.txt | 8 ++++---- requirements/static/pkg/py3.10/darwin.txt | 4 ++-- requirements/static/pkg/py3.10/freebsd.txt | 4 ++-- requirements/static/pkg/py3.10/linux.txt | 4 ++-- requirements/static/pkg/py3.10/windows.txt | 4 ++-- requirements/static/pkg/py3.7/freebsd.txt | 4 ++-- requirements/static/pkg/py3.7/linux.txt | 4 ++-- requirements/static/pkg/py3.7/windows.txt | 4 ++-- requirements/static/pkg/py3.8/freebsd.txt | 4 ++-- requirements/static/pkg/py3.8/linux.txt | 4 ++-- requirements/static/pkg/py3.8/windows.txt | 4 ++-- requirements/static/pkg/py3.9/darwin.txt | 4 ++-- requirements/static/pkg/py3.9/freebsd.txt | 4 ++-- requirements/static/pkg/py3.9/linux.txt | 4 ++-- requirements/static/pkg/py3.9/windows.txt | 4 ++-- 43 files changed, 118 insertions(+), 114 deletions(-) create mode 100644 changelog/64336.security.md diff --git a/changelog/64336.security.md b/changelog/64336.security.md new file mode 100644 index 00000000000..a7b1c186a1d --- /dev/null +++ b/changelog/64336.security.md @@ -0,0 +1,4 @@ +Upgrade to `requests==2.31.0` + +Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 0a5fc41eb86..751ccf03051 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -365,10 +365,10 @@ cffi==1.14.6 # bcrypt # cryptography # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -765,7 
+765,7 @@ requests-ntlm==1.1.0 # via pywinrm requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 7103b45b1aa..69d3711f56c 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -366,10 +366,10 @@ cffi==1.14.6 # cryptography # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -757,7 +757,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 42cc0af22f9..ce07a3a5061 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -12,7 +12,7 @@ certifi==2022.12.7 # via # -c requirements/static/ci/py3.10/linux.txt # requests -chardet==3.0.4 +charset-normalizer==2.0.12 # via # -c requirements/static/ci/py3.10/linux.txt # requests @@ -139,7 +139,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index c5c99cda87b..5c66b684333 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -364,10 +364,10 @@ cffi==1.14.6 # cryptography # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r 
requirements/static/ci/common.in cheroot==8.5.2 @@ -757,7 +757,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index b152b352f3d..441c2a17678 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -732,7 +732,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.26.0 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 1b81e68fa16..e5030db7ce1 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -378,10 +378,10 @@ cffi==1.14.6 # cryptography # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -787,7 +787,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/pkgtests-windows.txt b/requirements/static/ci/py3.10/pkgtests-windows.txt index d78bb773a5e..fc22d0a9805 100644 --- a/requirements/static/ci/py3.10/pkgtests-windows.txt +++ b/requirements/static/ci/py3.10/pkgtests-windows.txt @@ -142,7 +142,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # via # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.28.2 +requests==2.31.0 # via -r requirements/base.txt six==1.16.0 # via cheroot diff --git a/requirements/static/ci/py3.10/pkgtests.txt b/requirements/static/ci/py3.10/pkgtests.txt index 7d795f16f38..bb18c792933 100644 --- 
a/requirements/static/ci/py3.10/pkgtests.txt +++ b/requirements/static/ci/py3.10/pkgtests.txt @@ -129,7 +129,7 @@ pyzmq==25.0.2 # via # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.28.2 +requests==2.31.0 # via # -r requirements/base.txt # docker diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index c7736afe27d..8dd4c9502b7 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -49,10 +49,10 @@ cffi==1.14.6 # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests charset-normalizer==2.1.1 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -322,7 +322,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 54e60b5f31a..7196206607d 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -368,10 +368,10 @@ cffi==1.14.6 # cryptography # napalm # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -810,7 +810,7 @@ requests-ntlm==1.1.0 # via pywinrm requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index 874b37d15cb..96093a63491 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -12,7 +12,7 @@ certifi==2022.12.7 # via # -c requirements/static/ci/py3.7/linux.txt # requests -chardet==3.0.4 
+charset-normalizer==2.0.12 # via # -c requirements/static/ci/py3.7/linux.txt # requests @@ -147,7 +147,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index a2540a91d62..914ede9e881 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -367,10 +367,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -796,7 +796,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index 3c462854de6..30e74e8c179 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -778,7 +778,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.26.0 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 6be7aa6c23e..57ca1369c35 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -381,10 +381,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -828,7 +828,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 
+requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index 012380b7f01..bb5cbef5cb3 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -55,10 +55,10 @@ cffi==1.14.6 # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -336,7 +336,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 3b902826f8f..4639d0e5cf0 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -366,10 +366,10 @@ cffi==1.14.6 # cryptography # napalm # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -799,7 +799,7 @@ requests-ntlm==1.1.0 # via pywinrm requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index 8a7e9a974c5..3f7c81f3f36 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -12,7 +12,7 @@ certifi==2022.12.7 # via # -c requirements/static/ci/py3.8/linux.txt # requests -chardet==3.0.4 +charset-normalizer==2.0.12 # via # -c requirements/static/ci/py3.8/linux.txt # requests @@ -139,7 +139,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r 
requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 8a8f1413d13..cfa7ee3fc00 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -365,10 +365,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -786,7 +786,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 91fe6e4c095..5c36acb022d 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -769,7 +769,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.26.0 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 500d256ec0a..f3eb0152a43 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -379,10 +379,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -816,7 +816,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/windows.txt 
b/requirements/static/ci/py3.8/windows.txt index b3dbeb74b09..3de5af532da 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -51,10 +51,10 @@ cffi==1.14.6 # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -324,7 +324,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 6fbd3af1e6e..42583c5267d 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -366,10 +366,10 @@ cffi==1.14.6 # cryptography # napalm # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -802,7 +802,7 @@ requests-ntlm==1.1.0 # via pywinrm requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index afd6f276d91..278dd620676 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -367,10 +367,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -789,7 +789,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r 
requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index aeba29cca9c..96c88b70650 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -12,7 +12,7 @@ certifi==2022.12.7 # via # -c requirements/static/ci/py3.9/linux.txt # requests -chardet==3.0.4 +charset-normalizer==2.0.12 # via # -c requirements/static/ci/py3.9/linux.txt # requests @@ -143,7 +143,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 4d4c68692f1..04916db64fc 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -365,10 +365,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -789,7 +789,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 80fef45547a..f17f539b6fc 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -770,7 +770,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.26.0 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index aba6918f419..af401daa96a 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt 
@@ -381,10 +381,10 @@ cffi==1.14.6 # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -821,7 +821,7 @@ redis==3.5.3 # via redis-py-cluster requests-oauthlib==1.3.0 # via msrest -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 7efff7ade82..65f72d7fc3e 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -51,10 +51,10 @@ cffi==1.14.6 # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests charset-normalizer==2.0.12 - # via aiohttp + # via + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 @@ -325,7 +325,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/static/ci/common.in diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index daf832bbb38..30246a95e32 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -10,7 +10,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -93,7 +93,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # apache-libcloud diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index d23b7f92d80..aa38501b8f2 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ 
b/requirements/static/pkg/py3.10/freebsd.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -83,7 +83,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index bba6c238e71..912c23cd9d2 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -81,7 +81,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 04ec542cbe6..0603c17b190 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -108,7 +108,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index 2d977066b3d..b6326e74376 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -8,7 +8,7 @@ 
certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -81,7 +81,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index 624cc1b8f67..6f69609066b 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -79,7 +79,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index 8169d3d689b..98ba3cbc8e4 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -109,7 +109,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index 98f692b5e32..aef6e5b9dfd 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography 
-chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -81,7 +81,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index 135bf3051a3..1e6bf3a036f 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -79,7 +79,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 3d07e6c272d..2cd2c5dba1f 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -109,7 +109,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index a0eb9b18cdf..30cfdba6e5a 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -10,7 +10,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via 
cherrypy @@ -93,7 +93,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # apache-libcloud diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 712c3273927..f67480f1059 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -83,7 +83,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index dc3dbc1a14b..b00714bff87 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -8,7 +8,7 @@ certifi==2022.12.7 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -81,7 +81,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index dc6a8f55262..81a3df01a42 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -109,7 +109,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # 
via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt From 4e48d9d803b0f5b67cfb83b0d9b498d456d80cd5 Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Thu, 20 Jul 2023 20:27:22 +0000 Subject: [PATCH 17/73] correct changeog --- changelog/{64370.security.md => cve-2023-20897.security.md} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename changelog/{64370.security.md => cve-2023-20897.security.md} (100%) diff --git a/changelog/64370.security.md b/changelog/cve-2023-20897.security.md similarity index 100% rename from changelog/64370.security.md rename to changelog/cve-2023-20897.security.md From ad847f86c43f7b264a55a8a42753eaa5cfa48002 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 26 Jul 2023 12:17:13 +0100 Subject: [PATCH 18/73] Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 Signed-off-by: Pedro Algarvio --- changelog/64718.security.md | 1 + requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/docs.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/lint.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/pkgtests-windows.txt | 2 +- requirements/static/ci/py3.10/pkgtests.txt | 2 +- requirements/static/ci/py3.10/tools.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/tools.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/docs.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/lint.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/docs.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/lint.txt | 2 +- 
requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/docs.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/lint.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/tools.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- requirements/static/pkg/py3.10/darwin.txt | 2 +- requirements/static/pkg/py3.10/freebsd.txt | 2 +- requirements/static/pkg/py3.10/linux.txt | 2 +- requirements/static/pkg/py3.10/windows.txt | 2 +- requirements/static/pkg/py3.7/freebsd.txt | 2 +- requirements/static/pkg/py3.7/linux.txt | 2 +- requirements/static/pkg/py3.7/windows.txt | 2 +- requirements/static/pkg/py3.8/freebsd.txt | 2 +- requirements/static/pkg/py3.8/linux.txt | 2 +- requirements/static/pkg/py3.8/windows.txt | 2 +- requirements/static/pkg/py3.9/darwin.txt | 2 +- requirements/static/pkg/py3.9/freebsd.txt | 2 +- requirements/static/pkg/py3.9/linux.txt | 2 +- requirements/static/pkg/py3.9/windows.txt | 2 +- 46 files changed, 46 insertions(+), 45 deletions(-) create mode 100644 changelog/64718.security.md diff --git a/changelog/64718.security.md b/changelog/64718.security.md new file mode 100644 index 00000000000..b40aef1ad85 --- /dev/null +++ b/changelog/64718.security.md @@ -0,0 +1 @@ +Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 751ccf03051..2f80b5afe75 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -350,7 +350,7 @@ cachetools==4.2.2 # via google-auth cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git 
a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 69d3711f56c..29902f3471e 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -350,7 +350,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index ce07a3a5061..588b8682f56 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -8,7 +8,7 @@ alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.10/linux.txt # requests diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 5c66b684333..a6088ab4e56 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -348,7 +348,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 441c2a17678..1d546cdce92 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -353,7 +353,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index e5030db7ce1..1a71cb2c147 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -361,7 +361,7 @@ cachetools==4.2.2 
# python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.10/pkgtests-windows.txt b/requirements/static/ci/py3.10/pkgtests-windows.txt index fc22d0a9805..de7ee93e182 100644 --- a/requirements/static/ci/py3.10/pkgtests-windows.txt +++ b/requirements/static/ci/py3.10/pkgtests-windows.txt @@ -13,7 +13,7 @@ attrs==22.2.0 # pytest-system-statistics autocommand==2.2.2 # via jaraco.text -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.15.1 # via clr-loader diff --git a/requirements/static/ci/py3.10/pkgtests.txt b/requirements/static/ci/py3.10/pkgtests.txt index bb18c792933..127e64a8857 100644 --- a/requirements/static/ci/py3.10/pkgtests.txt +++ b/requirements/static/ci/py3.10/pkgtests.txt @@ -13,7 +13,7 @@ attrs==22.2.0 # pytest-system-statistics autocommand==2.2.2 # via jaraco.text -certifi==2022.12.7 +certifi==2023.07.22 # via requests charset-normalizer==3.0.1 # via requests diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index 8fcddf7da97..a9f3627674b 100644 --- a/requirements/static/ci/py3.10/tools.txt +++ b/requirements/static/ci/py3.10/tools.txt @@ -14,7 +14,7 @@ botocore==1.24.46 # via # boto3 # s3transfer -certifi==2022.12.7 +certifi==2023.07.22 # via requests charset-normalizer==3.0.1 # via requests diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 8dd4c9502b7..cc1d4be6809 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -36,7 +36,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # -r requirements/windows.txt diff --git a/requirements/static/ci/py3.11/tools.txt 
b/requirements/static/ci/py3.11/tools.txt index 556718941f1..733d7d186c6 100644 --- a/requirements/static/ci/py3.11/tools.txt +++ b/requirements/static/ci/py3.11/tools.txt @@ -14,7 +14,7 @@ botocore==1.24.46 # via # boto3 # s3transfer -certifi==2022.12.7 +certifi==2023.07.22 # via requests charset-normalizer==3.0.1 # via requests diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 7196206607d..5bf5f8e2fa8 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -352,7 +352,7 @@ cachetools==4.2.2 # via google-auth cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index 96093a63491..31209a994b2 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -8,7 +8,7 @@ alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.7/linux.txt # requests diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 914ede9e881..e50e27f1e59 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -350,7 +350,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index 30e74e8c179..433b9e65b7a 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -357,7 +357,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 
+certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 57ca1369c35..14a0fc92f75 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -363,7 +363,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index bb5cbef5cb3..74af3829dd3 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -42,7 +42,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # -r requirements/windows.txt diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 4639d0e5cf0..517712ebd62 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -350,7 +350,7 @@ cachetools==4.2.2 # via google-auth cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index 3f7c81f3f36..da96a0c3bfb 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -8,7 +8,7 @@ alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.8/linux.txt # requests diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index cfa7ee3fc00..abd15d947e9 100644 --- 
a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -348,7 +348,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 5c36acb022d..964cd85822d 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -355,7 +355,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index f3eb0152a43..80168bb9525 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -361,7 +361,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 3de5af532da..ecc0e21110f 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -38,7 +38,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # -r requirements/windows.txt diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 42583c5267d..3b47a0e4eac 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -350,7 +350,7 @@ cachetools==4.2.2 # via google-auth cassandra-driver==3.25.0 # via -r 
requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 278dd620676..7d019849de5 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -350,7 +350,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index 96c88b70650..772a37c357a 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -8,7 +8,7 @@ alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.9/linux.txt # requests diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 04916db64fc..78c2e3d3de8 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -348,7 +348,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index f17f539b6fc..fc246bac01d 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -353,7 +353,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.25.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index af401daa96a..5f07981bec6 100644 --- 
a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -363,7 +363,7 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # kubernetes diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index 75862497bb6..753320ee372 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -14,7 +14,7 @@ botocore==1.24.46 # via # boto3 # s3transfer -certifi==2022.12.7 +certifi==2023.07.22 # via requests charset-normalizer==3.0.1 # via requests diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 65f72d7fc3e..5ea6ed98dff 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -38,7 +38,7 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/static/ci/common.in # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index 30246a95e32..fd3f1f08908 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -6,7 +6,7 @@ # apache-libcloud==2.5.0 # via -r requirements/darwin.txt -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index aa38501b8f2..d995308534f 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.10/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 
+certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 912c23cd9d2..8a638b09392 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.10/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 0603c17b190..6eda1b8107f 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.10/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index b6326e74376..2bc8d746ce2 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.7/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index 6f69609066b..77152d7c355 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.7/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via 
cryptography diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index 98ba3cbc8e4..902910e6e4f 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.7/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index aef6e5b9dfd..6534349e226 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.8/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index 1e6bf3a036f..18aa236f77f 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.8/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 2cd2c5dba1f..d529eb5450c 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.8/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests diff --git a/requirements/static/pkg/py3.9/darwin.txt 
b/requirements/static/pkg/py3.9/darwin.txt index 30cfdba6e5a..45aca678f08 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -6,7 +6,7 @@ # apache-libcloud==2.5.0 # via -r requirements/darwin.txt -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index f67480f1059..0154d1ef404 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.9/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index b00714bff87..62dc8d341b7 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.9/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index 81a3df01a42..03b89c5632b 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/static/pkg/py3.9/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests From a95af1ec7769ab988b4f5b930766aeb56c45f0ae Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 27 Jul 2023 13:43:12 +0100 Subject: [PATCH 19/73] Properly upgrade Debian based systems unattended Signed-off-by: 
Pedro Algarvio --- pkg/tests/conftest.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 5eba84bb4aa..b7d908f32d5 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -52,7 +52,18 @@ def _system_up_to_date( if grains["os_family"] == "Debian": ret = shell.run("apt", "update") assert ret.returncode == 0 - ret = shell.run("apt", "upgrade", "-y") + env = os.environ.copy() + env["DEBIAN_FRONTEND"] = "noninteractive" + ret = shell.run( + "apt", + "upgrade", + "-y", + "-o", + "DPkg::Options::=--force-confdef", + "-o", + "DPkg::Options::=--force-confold", + env=env, + ) assert ret.returncode == 0 elif grains["os_family"] == "Redhat": ret = shell.run("yum", "update", "-y") From 7d2e4b069221d4373afeea077decac93888a8481 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 26 Jul 2023 16:29:02 +0100 Subject: [PATCH 20/73] Upgrade `relenv` to `0.13.2` and Python to `3.10.12`. Addresses multiple CVEs in dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 Signed-off-by: Pedro Algarvio --- .github/actions/setup-relenv/action.yml | 2 +- .github/workflows/build-deps-onedir.yml | 4 +- .github/workflows/build-salt-onedir.yml | 4 +- .github/workflows/ci.yml | 110 +++++++-------- .github/workflows/nightly.yml | 110 +++++++-------- .github/workflows/release.yml | 64 ++++----- .github/workflows/scheduled.yml | 110 +++++++-------- .github/workflows/staging.yml | 174 ++++++++++++------------ changelog/64719.security.md | 3 + cicd/shared-gh-workflows-context.yml | 4 +- pkg/macos/build_python.sh | 4 +- pkg/windows/build.ps1 | 6 +- pkg/windows/build_python.ps1 | 6 +- 13 files changed, 298 insertions(+), 303 deletions(-) create mode 100644 changelog/64719.security.md diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index 1f228fd1822..50ade327764 100644 --- a/.github/actions/setup-relenv/action.yml +++ 
b/.github/actions/setup-relenv/action.yml @@ -22,7 +22,7 @@ inputs: required: false type: string description: The version of relenv to use - default: 0.12.3 + default: 0.13.2 outputs: version: diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index 6197ebc0333..c0371734bb7 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -21,12 +21,12 @@ on: relenv-version: required: false type: string - default: 0.12.3 + default: 0.13.2 description: The version of relenv to use python-version: required: false type: string - default: 3.10.9 + default: 3.10.12 description: The version of python to use with relenv env: diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 972176c8cee..bc89e8bb1e4 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -21,12 +21,12 @@ on: relenv-version: required: false type: string - default: 0.12.3 + default: 0.13.2 description: The version of relenv to use python-version: required: false type: string - default: 3.10.9 + default: 3.10.12 description: The version of python to use with relenv env: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 96c8b435a75..f36a04cf082 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -442,8 +442,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-salt-onedir: name: Build Salt Onedir @@ -458,8 +458,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] 
}} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-rpm-pkgs: name: Build RPM Packages @@ -470,8 +470,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-deb-pkgs: name: Build DEB Packages @@ -482,8 +482,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-windows-pkgs: name: Build Windows Packages @@ -494,8 +494,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-macos-pkgs: name: Build macOS Packages @@ -506,8 +506,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Tests @@ -522,7 +522,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -540,7 +540,7 
@@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -558,7 +558,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -576,7 +576,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -594,7 +594,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -612,7 +612,7 
@@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -630,7 +630,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -648,7 +648,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -666,7 +666,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -684,7 
+684,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -702,7 +702,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -720,7 +720,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -738,7 +738,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ 
-756,7 +756,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -774,7 +774,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -792,7 +792,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -810,7 +810,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} 
@@ -828,7 +828,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -846,7 +846,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -864,7 +864,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -883,7 +883,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -901,7 +901,7 @@ jobs: arch: 
amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -919,7 +919,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -937,7 +937,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -955,7 +955,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -973,7 +973,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
- cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -991,7 +991,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1009,7 +1009,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1027,7 +1027,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1045,7 +1045,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1063,7 +1063,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1081,7 +1081,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1099,7 +1099,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1117,7 +1117,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1135,7 +1135,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1153,7 +1153,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1171,7 +1171,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1189,7 +1189,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 
'pull_request' }} @@ -1207,7 +1207,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1225,7 +1225,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1243,7 +1243,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1261,7 +1261,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} @@ -1279,7 +1279,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index a6ee739132a..e5971fbbc81 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -491,8 +491,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-salt-onedir: name: Build Salt Onedir @@ -507,8 +507,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-rpm-pkgs: name: Build RPM Packages @@ -519,8 +519,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-deb-pkgs: name: Build DEB Packages @@ -531,8 +531,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" 
build-windows-pkgs: name: Build Windows Packages @@ -543,8 +543,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" environment: nightly sign-packages: false secrets: inherit @@ -558,8 +558,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" environment: nightly sign-packages: true secrets: inherit @@ -577,7 +577,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -595,7 +595,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -613,7 +613,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -631,7 +631,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -649,7 +649,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -667,7 +667,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -685,7 +685,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -703,7 +703,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -721,7 +721,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -739,7 +739,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -757,7 +757,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -775,7 +775,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -793,7 +793,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -811,7 +811,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -829,7 +829,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -847,7 +847,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -865,7 +865,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -883,7 +883,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -901,7 +901,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -919,7 +919,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -938,7 +938,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -956,7 +956,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -974,7 +974,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -992,7 +992,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: 
false skip-junit-reports: false @@ -1010,7 +1010,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1028,7 +1028,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1046,7 +1046,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1064,7 +1064,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1082,7 +1082,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1100,7 +1100,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
- cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1118,7 +1118,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1136,7 +1136,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1154,7 +1154,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1172,7 +1172,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1190,7 +1190,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: 
false @@ -1208,7 +1208,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1226,7 +1226,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1244,7 +1244,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1262,7 +1262,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1280,7 +1280,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1298,7 +1298,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1316,7 +1316,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1334,7 +1334,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a301591ed60..dc96898061a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -235,7 +235,7 @@ jobs: distro-slug: almalinux-8 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -255,7 +255,7 @@ jobs: distro-slug: almalinux-8-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -275,7 +275,7 @@ jobs: distro-slug: almalinux-9 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -295,7 +295,7 @@ jobs: distro-slug: almalinux-9-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -315,7 +315,7 @@ jobs: distro-slug: amazonlinux-2 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -335,7 +335,7 @@ jobs: distro-slug: amazonlinux-2-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -355,7 +355,7 @@ jobs: distro-slug: centos-7 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -375,7 +375,7 @@ jobs: distro-slug: centos-7-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -395,7 +395,7 @@ jobs: distro-slug: centosstream-8 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -415,7 +415,7 @@ jobs: distro-slug: centosstream-8-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -435,7 +435,7 @@ jobs: distro-slug: centosstream-9 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -455,7 +455,7 @@ jobs: distro-slug: centosstream-9-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -475,7 +475,7 @@ jobs: distro-slug: debian-10 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -495,7 +495,7 @@ jobs: distro-slug: debian-11 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -515,7 +515,7 @@ jobs: distro-slug: debian-11-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -535,7 +535,7 @@ jobs: distro-slug: fedora-37 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -555,7 +555,7 @@ jobs: distro-slug: fedora-37-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -575,7 +575,7 @@ jobs: distro-slug: fedora-38 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -595,7 +595,7 @@ jobs: distro-slug: fedora-38-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -615,7 +615,7 @@ jobs: distro-slug: photonos-3 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -635,7 +635,7 @@ jobs: distro-slug: photonos-4 platform: 
linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -655,7 +655,7 @@ jobs: distro-slug: ubuntu-20.04 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -675,7 +675,7 @@ jobs: distro-slug: ubuntu-20.04-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -695,7 +695,7 @@ jobs: distro-slug: ubuntu-22.04 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -715,7 +715,7 @@ jobs: distro-slug: ubuntu-22.04-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -735,7 +735,7 @@ jobs: distro-slug: ubuntu-22.04 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ 
-755,7 +755,7 @@ jobs: distro-slug: ubuntu-22.04-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -775,7 +775,7 @@ jobs: distro-slug: macos-12 platform: darwin arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -795,7 +795,7 @@ jobs: distro-slug: macos-12 platform: darwin arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -816,7 +816,7 @@ jobs: platform: windows arch: amd64 pkg-type: nsis - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -836,7 +836,7 @@ jobs: platform: windows arch: amd64 pkg-type: msi - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release skip-code-coverage: true @@ -856,7 +856,7 @@ jobs: platform: windows arch: amd64 pkg-type: onedir - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: release 
skip-code-coverage: true diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 98dfdeb4fa6..e6dd4bcd94d 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -476,8 +476,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-salt-onedir: name: Build Salt Onedir @@ -492,8 +492,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-rpm-pkgs: name: Build RPM Packages @@ -504,8 +504,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-deb-pkgs: name: Build DEB Packages @@ -516,8 +516,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-windows-pkgs: name: Build Windows Packages @@ -528,8 +528,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-macos-pkgs: name: Build macOS Packages @@ 
-540,8 +540,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Tests @@ -556,7 +556,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -574,7 +574,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -592,7 +592,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -610,7 +610,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -628,7 +628,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: 
deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -646,7 +646,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -664,7 +664,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -682,7 +682,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -700,7 +700,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -718,7 +718,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -736,7 +736,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -754,7 +754,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -772,7 +772,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -790,7 +790,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -808,7 +808,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -826,7 +826,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -844,7 +844,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -862,7 +862,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -880,7 +880,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -898,7 +898,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -917,7 +917,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -935,7 +935,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -953,7 +953,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -971,7 +971,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -989,7 +989,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: 
false @@ -1007,7 +1007,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1025,7 +1025,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1043,7 +1043,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1061,7 +1061,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1079,7 +1079,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1097,7 +1097,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1115,7 +1115,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1133,7 +1133,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1151,7 +1151,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1169,7 +1169,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1187,7 +1187,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1205,7 
+1205,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1223,7 +1223,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1241,7 +1241,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1259,7 +1259,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1277,7 +1277,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1295,7 +1295,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false @@ -1313,7 +1313,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: false skip-junit-reports: false diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index a2d1bef32b1..9666fd465f4 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -482,8 +482,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-salt-onedir: name: Build Salt Onedir @@ -498,8 +498,8 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-rpm-pkgs: name: Build RPM Packages @@ -510,8 +510,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-deb-pkgs: name: Build DEB Packages @@ -522,8 +522,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" build-windows-pkgs: name: Build Windows Packages @@ -534,8 +534,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" environment: staging sign-packages: ${{ inputs.sign-windows-packages }} secrets: inherit @@ -549,8 +549,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.12.3" - python-version: "3.10.11" + relenv-version: "0.13.2" + python-version: "3.10.12" environment: staging sign-packages: true secrets: inherit @@ -568,7 +568,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -586,7 +586,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -604,7 +604,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} @@ -622,7 +622,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -640,7 +640,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -658,7 +658,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -676,7 +676,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -694,7 +694,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ 
-712,7 +712,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -730,7 +730,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -748,7 +748,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -766,7 +766,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -784,7 +784,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -802,7 +802,7 @@ jobs: arch: x86_64 salt-version: 
"${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -820,7 +820,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -838,7 +838,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -856,7 +856,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -874,7 +874,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -892,7 +892,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -910,7 +910,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} @@ -929,7 +929,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -947,7 +947,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -965,7 +965,7 @@ jobs: arch: amd64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -983,7 +983,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1001,7 +1001,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1019,7 +1019,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1037,7 +1037,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1055,7 +1055,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1073,7 +1073,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1091,7 +1091,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1109,7 +1109,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1127,7 +1127,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1145,7 +1145,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1163,7 +1163,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1181,7 +1181,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1199,7 +1199,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1217,7 +1217,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1235,7 +1235,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1253,7 +1253,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1271,7 +1271,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1289,7 +1289,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1307,7 +1307,7 @@ jobs: arch: x86_64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -1325,7 +1325,7 @@ jobs: arch: aarch64 testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 skip-code-coverage: true skip-junit-reports: true @@ -2087,7 +2087,7 @@ jobs: distro-slug: almalinux-8 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2106,7 +2106,7 @@ jobs: distro-slug: almalinux-8-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2125,7 +2125,7 @@ jobs: distro-slug: almalinux-9 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ 
-2144,7 +2144,7 @@ jobs: distro-slug: almalinux-9-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2163,7 +2163,7 @@ jobs: distro-slug: amazonlinux-2 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2182,7 +2182,7 @@ jobs: distro-slug: amazonlinux-2-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2201,7 +2201,7 @@ jobs: distro-slug: centos-7 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2220,7 +2220,7 @@ jobs: distro-slug: centos-7-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2239,7 +2239,7 @@ jobs: distro-slug: centosstream-8 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2258,7 +2258,7 @@ jobs: distro-slug: centosstream-8-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2277,7 +2277,7 @@ jobs: distro-slug: centosstream-9 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2296,7 +2296,7 @@ jobs: distro-slug: centosstream-9-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2315,7 +2315,7 @@ jobs: distro-slug: debian-10 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2334,7 +2334,7 @@ jobs: distro-slug: debian-11 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2353,7 +2353,7 @@ jobs: distro-slug: debian-11-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2372,7 +2372,7 @@ jobs: distro-slug: fedora-37 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2391,7 +2391,7 @@ jobs: distro-slug: fedora-37-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2410,7 +2410,7 @@ jobs: distro-slug: fedora-38 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2429,7 +2429,7 @@ jobs: distro-slug: fedora-38-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2448,7 +2448,7 @@ jobs: distro-slug: photonos-3 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2467,7 +2467,7 @@ jobs: distro-slug: photonos-4 platform: linux arch: x86_64 - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2486,7 +2486,7 @@ jobs: distro-slug: ubuntu-20.04 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2505,7 +2505,7 @@ jobs: distro-slug: ubuntu-20.04-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2524,7 +2524,7 @@ jobs: distro-slug: ubuntu-22.04 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2543,7 +2543,7 @@ jobs: distro-slug: ubuntu-22.04-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2562,7 +2562,7 @@ jobs: distro-slug: ubuntu-22.04 platform: linux arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2581,7 +2581,7 @@ jobs: 
distro-slug: ubuntu-22.04-arm64 platform: linux arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2600,7 +2600,7 @@ jobs: distro-slug: macos-12 platform: darwin arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2619,7 +2619,7 @@ jobs: distro-slug: macos-12 platform: darwin arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2639,7 +2639,7 @@ jobs: platform: windows arch: amd64 pkg-type: nsis - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2658,7 +2658,7 @@ jobs: platform: windows arch: amd64 pkg-type: msi - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true @@ -2677,7 +2677,7 @@ jobs: platform: windows arch: amd64 pkg-type: onedir - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.12 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true 
diff --git a/changelog/64719.security.md b/changelog/64719.security.md new file mode 100644 index 00000000000..3476499d3d7 --- /dev/null +++ b/changelog/64719.security.md @@ -0,0 +1,3 @@ +Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + +Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index b554b57aafa..846d5625320 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,2 +1,2 @@ -python_version: "3.10.11" -relenv_version: "0.12.3" +python_version: "3.10.12" +relenv_version: "0.13.2" diff --git a/pkg/macos/build_python.sh b/pkg/macos/build_python.sh index be179b7b166..7d65dca843b 100755 --- a/pkg/macos/build_python.sh +++ b/pkg/macos/build_python.sh @@ -48,8 +48,8 @@ _usage() { echo " -v, --version version of python to install, must be available with relenv" echo " -r, --relenv-version version of python to install, must be available with relenv" echo "" - echo " To build python 3.10.11:" - echo " example: $0 --version 3.10.11" + echo " To build python 3.10.12:" + echo " example: $0 --version 3.10.12" } # _msg diff --git a/pkg/windows/build.ps1 b/pkg/windows/build.ps1 index 6c5cac2ac17..ac273a6c163 100644 --- a/pkg/windows/build.ps1 +++ b/pkg/windows/build.ps1 @@ -38,12 +38,8 @@ param( [Parameter(Mandatory=$false)] [ValidatePattern("^\d{1,2}.\d{1,2}.\d{1,2}$")] - [ValidateSet( - "3.11.3", - "3.10.11" - )] [Alias("p")] - [String] $PythonVersion = "3.10.11", + [String] $PythonVersion = "3.10.12", [Parameter(Mandatory=$false)] [Alias("r")] diff --git a/pkg/windows/build_python.ps1 b/pkg/windows/build_python.ps1 index e91f41fea00..90a2b053a92 100644 --- a/pkg/windows/build_python.ps1 +++ b/pkg/windows/build_python.ps1 @@ -17,12 +17,8 @@ build_python.ps1 -Version 3.10.9 -Architecture x86 param( [Parameter(Mandatory=$false)] 
[ValidatePattern("^\d{1,2}.\d{1,2}.\d{1,2}$")] - [ValidateSet( - "3.11.3", - "3.10.11" - )] [Alias("v")] - [String] $Version = "3.10.11", + [String] $Version = "3.10.12", [Parameter(Mandatory=$false)] [Alias("r")] From fff6078c29fe8005500e0f51673db96f29069295 Mon Sep 17 00:00:00 2001 From: Felippe Burk Date: Tue, 1 Aug 2023 10:30:16 -0600 Subject: [PATCH 21/73] adding obsoletes to rpm.spec for photon3 packages Signed-off-by: Felippe Burk --- pkg/rpm/salt.spec | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 9a6f71cba38..50b8e9c9783 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -32,6 +32,8 @@ Group: System Environment/Daemons License: ASL 2.0 URL: https://saltproject.io/ +Provides: salt = %{version} +Obsoletes: salt3 < 3006 BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) @@ -68,6 +70,8 @@ servers, handle them quickly and through a simple and manageable interface. Summary: Management component for salt, a parallel remote execution system Group: System Environment/Daemons Requires: %{name} = %{version}-%{release} +Provides: salt-master = %{version} +Obsoletes: salt3-master < 3006 %description master The Salt master is the central server to which all minions connect. @@ -77,6 +81,8 @@ The Salt master is the central server to which all minions connect. Summary: Client component for Salt, a parallel remote execution system Group: System Environment/Daemons Requires: %{name} = %{version}-%{release} +Provides: salt-minion = %{version} +Obsoletes: salt3-minion < 3006 %description minion The Salt minion is the agent component of Salt. It listens for instructions @@ -87,6 +93,8 @@ from the master, runs jobs, and returns results back to the master. 
Summary: Master-of-master component for Salt, a parallel remote execution system Group: System Environment/Daemons Requires: %{name}-master = %{version}-%{release} +Provides: salt-syndic = %{version} +Obsoletes: salt3-syndic < 3006 %description syndic The Salt syndic is a master daemon which can receive instruction from a @@ -98,6 +106,8 @@ infrastructure. Summary: REST API for Salt, a parallel remote execution system Group: Applications/System Requires: %{name}-master = %{version}-%{release} +Provides: salt-api = %{version} +Obsoletes: salt3-api < 3006 %description api salt-api provides a REST interface to the Salt master. @@ -107,6 +117,8 @@ salt-api provides a REST interface to the Salt master. Summary: Cloud provisioner for Salt, a parallel remote execution system Group: Applications/System Requires: %{name}-master = %{version}-%{release} +Provides: salt-cloud = %{version} +Obsoletes: salt3-cloud < 3006 %description cloud The salt-cloud tool provisions new cloud VMs, installs salt-minion on them, and @@ -117,6 +129,8 @@ adds them to the master's collection of controllable minions. Summary: Agentless SSH-based version of Salt, a parallel remote execution system Group: Applications/System Requires: %{name} = %{version}-%{release} +Provides: salt-ssh = %{version} +Obsoletes: salt3-ssh < 3006 %description ssh The salt-ssh tool can run remote execution functions and states without the use From 06ba42c0ff9ca1eb5d8a84412581e50285ef9d41 Mon Sep 17 00:00:00 2001 From: twangboy Date: Tue, 1 Aug 2023 12:36:21 -0600 Subject: [PATCH 22/73] Fallback to WMI when pythonnet fails --- salt/utils/win_network.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/salt/utils/win_network.py b/salt/utils/win_network.py index 702f29702f3..abbe15145d9 100644 --- a/salt/utils/win_network.py +++ b/salt/utils/win_network.py @@ -20,6 +20,7 @@ depending on the version of Windows this is run on. 
Once support for Windows """ # https://docs.microsoft.com/en-us/dotnet/api/system.net.networkinformation.networkinterface.getallnetworkinterfaces?view=netframework-4.7.2 +import logging import platform import salt.utils.win_reg @@ -27,6 +28,8 @@ from salt._compat import ipaddress IS_WINDOWS = platform.system() == "Windows" +log = logging.getLogger(__name__) + __virtualname__ = "win_network" if IS_WINDOWS: @@ -53,8 +56,20 @@ if IS_WINDOWS: import salt.utils.winapi else: # This uses .NET to get network settings and is faster than WMI - import clr - from System.Net import NetworkInformation + try: + import clr + from System.Net import NetworkInformation + except RuntimeError: + # In some environments, using the Relenv OneDir package, we can't + # load pythonnet. Uninstalling and reinstalling pythonnet fixes the + # issue, but it is a manual step. Until we figure it out, we are + # just going to fall back to WMI. I was able to reproduce a failing + # system using Windows 10 Home Edition + log.debug("Failed to load pythonnet. 
Falling back to WMI") + USE_WMI = True + import wmi + + import salt.utils.winapi # TODO: Should we deprecate support for pythonnet 2.5.2, these enumerations can # TODO: be deleted From 035afeda0145719e18279afa5397002b17551598 Mon Sep 17 00:00:00 2001 From: twangboy Date: Tue, 1 Aug 2023 17:03:49 -0600 Subject: [PATCH 23/73] Add changelog --- changelog/64897.fixed.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelog/64897.fixed.md diff --git a/changelog/64897.fixed.md b/changelog/64897.fixed.md new file mode 100644 index 00000000000..6914040120c --- /dev/null +++ b/changelog/64897.fixed.md @@ -0,0 +1,2 @@ +In scenarios where PythonNet fails to load, Salt will now fall back to WMI for +gathering grains information From b0dfa09544a0459bbf8e028361ca1c821e693dbb Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Wed, 2 Aug 2023 09:35:26 -0600 Subject: [PATCH 24/73] [3006.2] Update cryptography to 41.0.3 --- requirements/darwin.txt | 2 +- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/lint.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/lint.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/lint.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/lint.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 
requirements/static/pkg/freebsd.in | 2 +- requirements/static/pkg/linux.in | 2 +- requirements/static/pkg/py3.10/darwin.txt | 2 +- requirements/static/pkg/py3.10/freebsd.txt | 2 +- requirements/static/pkg/py3.10/linux.txt | 2 +- requirements/static/pkg/py3.10/windows.txt | 2 +- requirements/static/pkg/py3.7/freebsd.txt | 2 +- requirements/static/pkg/py3.7/linux.txt | 2 +- requirements/static/pkg/py3.7/windows.txt | 2 +- requirements/static/pkg/py3.8/freebsd.txt | 2 +- requirements/static/pkg/py3.8/linux.txt | 2 +- requirements/static/pkg/py3.8/windows.txt | 2 +- requirements/static/pkg/py3.9/darwin.txt | 2 +- requirements/static/pkg/py3.9/freebsd.txt | 2 +- requirements/static/pkg/py3.9/linux.txt | 2 +- requirements/static/pkg/py3.9/windows.txt | 2 +- requirements/windows.txt | 2 +- 40 files changed, 40 insertions(+), 40 deletions(-) diff --git a/requirements/darwin.txt b/requirements/darwin.txt index 9edba2a9cd9..2b0106ba9d1 100644 --- a/requirements/darwin.txt +++ b/requirements/darwin.txt @@ -5,7 +5,7 @@ apache-libcloud>=2.4.0 backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' cherrypy>=17.4.1 -cryptography>=41.0.2 +cryptography>=41.0.3 gitpython>=3.1.30; python_version >= '3.7' idna>=2.8 linode-python>=1.1.1 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 2f80b5afe75..fbb2f2f0794 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -385,7 +385,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 29902f3471e..68b039e5afd 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -386,7 +386,7 @@ contextvars==2.4 # via -r 
requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/darwin.txt # adal diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index a6088ab4e56..24ffd08a5cc 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -384,7 +384,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # adal diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 1d546cdce92..5fefabc9d5c 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -390,7 +390,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 1a71cb2c147..02c58e79d46 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -398,7 +398,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index cc1d4be6809..38cf4ecc4c5 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -71,7 +71,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # 
-r requirements/windows.txt # etcd3-py diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 5bf5f8e2fa8..fed89e49cb9 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -392,7 +392,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index e50e27f1e59..cf5ef07354b 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -391,7 +391,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # adal diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index 433b9e65b7a..5d78d99d661 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -399,7 +399,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 14a0fc92f75..3119e758e45 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -405,7 +405,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.7/windows.txt 
b/requirements/static/ci/py3.7/windows.txt index 74af3829dd3..312f3ca1771 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -77,7 +77,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 517712ebd62..21e4d6e6262 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -390,7 +390,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index abd15d947e9..52433562278 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -389,7 +389,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # adal diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 964cd85822d..36c7e86ba0f 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -397,7 +397,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 80168bb9525..573ead84dc9 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ 
-403,7 +403,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index ecc0e21110f..99ffa451b05 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -73,7 +73,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 3b47a0e4eac..6072fd15e1d 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -390,7 +390,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 7d019849de5..41015d5eaf7 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -391,7 +391,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/darwin.txt # adal diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 78c2e3d3de8..6b7ce5f331e 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -389,7 +389,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 
+cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # adal diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index fc246bac01d..c75cc0104c6 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -395,7 +395,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==1.0.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 5f07981bec6..447952eb9dd 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -405,7 +405,7 @@ contextvars==2.4 # via -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # adal diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 5ea6ed98dff..0cdfe464e41 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -73,7 +73,7 @@ colorama==0.4.1 # via pytest contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # etcd3-py diff --git a/requirements/static/pkg/freebsd.in b/requirements/static/pkg/freebsd.in index 453c2c3633c..1e176a3bed5 100644 --- a/requirements/static/pkg/freebsd.in +++ b/requirements/static/pkg/freebsd.in @@ -1,7 +1,7 @@ # This file only exists to trigger the right static compiled requirements destination # Any non hard dependencies of Salt for FreeBSD can go here cherrypy -cryptography>=41.0.2 +cryptography>=41.0.3 backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' pycparser>=2.21; python_version >= '3.9' pyopenssl>=23.2.0 diff --git 
a/requirements/static/pkg/linux.in b/requirements/static/pkg/linux.in index fb3f0a8e154..a47fa824571 100644 --- a/requirements/static/pkg/linux.in +++ b/requirements/static/pkg/linux.in @@ -10,4 +10,4 @@ rpm-vercmp setproctitle>=1.2.3 timelib>=0.2.5 importlib-metadata>=3.3.0 -cryptography>=41.0.2 +cryptography>=41.0.3 diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index fd3f1f08908..3fea335831e 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -18,7 +18,7 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/darwin.txt # pyopenssl diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index d995308534f..5823a037e23 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # pyopenssl diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 8a638b09392..c39dcc61e0e 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # pyopenssl diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 6eda1b8107f..06a36539e8e 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r 
requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # pyopenssl diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index 2bc8d746ce2..63050b277cc 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # pyopenssl diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index 77152d7c355..218de2ed965 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # pyopenssl diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index 902910e6e4f..c35aadc8d7d 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # pyopenssl diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index 6534349e226..45aba1e9a87 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # pyopenssl diff --git a/requirements/static/pkg/py3.8/linux.txt 
b/requirements/static/pkg/py3.8/linux.txt index 18aa236f77f..2c89423d7d5 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # pyopenssl diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index d529eb5450c..70bf44a1c76 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # pyopenssl diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 45aca678f08..89a9792a886 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -18,7 +18,7 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/darwin.txt # pyopenssl diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 0154d1ef404..6c52150bc8f 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/freebsd.in # pyopenssl diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 62dc8d341b7..ec77b1bd49e 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -16,7 +16,7 @@ cherrypy==18.6.1 # via -r 
requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/static/pkg/linux.in # pyopenssl diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index 03b89c5632b..17f02c1119d 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.2 +cryptography==41.0.3 # via # -r requirements/windows.txt # pyopenssl diff --git a/requirements/windows.txt b/requirements/windows.txt index 77efe2ca55b..998bdd2dadc 100644 --- a/requirements/windows.txt +++ b/requirements/windows.txt @@ -10,7 +10,7 @@ backports.ssl-match-hostname>=3.7.0.1; python_version < '3.7' certifi>=2022.12.07 cffi>=1.14.5 cherrypy>=18.6.1 -cryptography>=41.0.2 +cryptography>=41.0.3 gitpython>=3.1.30; python_version >= '3.7' ioloop>=0.1a0 lxml>=4.6.3 From 4edf5015ac5d598590ee58e57207635088c059bd Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Thu, 3 Aug 2023 07:18:31 -0600 Subject: [PATCH 25/73] Update changelog for cryptography req upgrade --- changelog/64595.security.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/changelog/64595.security.md b/changelog/64595.security.md index 737603b7704..0875e68e625 100644 --- a/changelog/64595.security.md +++ b/changelog/64595.security.md @@ -1,10 +1,11 @@ -Upgrade to `cryptography==41.0.2`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-5cpq-8wj7-hf2v) +Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. 
Also resolves the following cryptography advisories: Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r From 6120bcac2ee79b6e8b104612941432841eb0c8c3 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 19 Jul 2023 11:53:05 -0500 Subject: [PATCH 26/73] Fixed gitfs cachedir_basename to avoid hash collisions --- changelog/cve-2023-20898.security.md | 1 + salt/utils/gitfs.py | 55 ++++- tests/pytests/unit/utils/test_gitfs.py | 253 ++++++++++++++++++++++ tests/unit/utils/test_gitfs.py | 286 ++++++------------------- 4 files changed, 371 insertions(+), 224 deletions(-) create mode 100644 changelog/cve-2023-20898.security.md create mode 100644 tests/pytests/unit/utils/test_gitfs.py diff --git a/changelog/cve-2023-20898.security.md b/changelog/cve-2023-20898.security.md new file mode 100644 index 00000000000..295e9d27bf9 --- /dev/null +++ b/changelog/cve-2023-20898.security.md @@ -0,0 +1 @@ +Fixed gtfs cachedir_basename to avoid hash collisions. Added MP Lock to gtfs. These changes should stop race conditions. diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index cc9895d8ab9..84e124dc8ab 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -11,6 +11,7 @@ import glob import hashlib import io import logging +import multiprocessing import os import shlex import shutil @@ -225,6 +226,10 @@ class GitProvider: invoking the parent class' __init__. """ + # master lock should only be locked for very short periods of times "seconds" + # the master lock should be used when ever git provider reads or writes to one if it locks + _master_lock = multiprocessing.Lock() + def __init__( self, opts, @@ -451,10 +456,19 @@ class GitProvider: failhard(self.role) hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) + # Generate full id. The full id is made from these parts name-id-env-_root. + # Full id stops collections in the gtfs cache. 
+ self._full_id = "-".join( + [ + getattr(self, "name", ""), + self.id.replace(" ", "-"), + getattr(self, "env", ""), + getattr(self, "_root", ""), + ] + ) # We loaded this data from yaml configuration files, so, its safe # to use UTF-8 - self.hash = hash_type(self.id.encode("utf-8")).hexdigest() - self.cachedir_basename = getattr(self, "name", self.hash) + self.cachedir_basename = f"{getattr(self, 'name', '')}-{hash_type(self._full_id.encode('utf-8')).hexdigest()}" self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename) self.linkdir = salt.utils.path.join(cache_root, "links", self.cachedir_basename) @@ -472,6 +486,12 @@ class GitProvider: log.critical(msg, exc_info=True) failhard(self.role) + def full_id(self): + return self._full_id + + def get_cachedir_basename(self): + return self.cachedir_basename + def _get_envs_from_ref_paths(self, refs): """ Return the names of remote refs (stripped of the remote name) and tags @@ -662,6 +682,19 @@ class GitProvider: """ Clear update.lk """ + if self.__class__._master_lock.acquire(timeout=60) is False: + # if gtfs works right we should never see this timeout error. + log.error("gtfs master lock timeout!") + raise TimeoutError("gtfs master lock timeout!") + try: + return self._clear_lock(lock_type) + finally: + self.__class__._master_lock.release() + + def _clear_lock(self, lock_type="update"): + """ + Clear update.lk without MultiProcessing locks + """ lock_file = self._get_lock_file(lock_type=lock_type) def _add_error(errlist, exc): @@ -834,6 +867,20 @@ class GitProvider: return False def _lock(self, lock_type="update", failhard=False): + """ + Place a lock file if (and only if) it does not already exist. + Without MultiProcessing locks. + """ + if self.__class__._master_lock.acquire(timeout=60) is False: + # if gtfs works right we should never see this timeout error. 
+ log.error("gtfs master lock timeout!") + raise TimeoutError("gtfs master lock timeout!") + try: + return self.__lock(lock_type, failhard) + finally: + self.__class__._master_lock.release() + + def __lock(self, lock_type="update", failhard=False): """ Place a lock file if (and only if) it does not already exist. """ @@ -903,9 +950,9 @@ class GitProvider: lock_type, lock_file, ) - success, fail = self.clear_lock() + success, fail = self._clear_lock() if success: - return self._lock(lock_type="update", failhard=failhard) + return self.__lock(lock_type="update", failhard=failhard) elif failhard: raise return diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py new file mode 100644 index 00000000000..5395810d5ed --- /dev/null +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -0,0 +1,253 @@ +import os +import shutil +from time import time + +import pytest + +import salt.fileserver.gitfs +import salt.utils.gitfs +import tests.support.paths +from salt.exceptions import FileserverConfigError +from tests.support.mock import MagicMock, patch + +try: + HAS_PYGIT2 = ( + salt.utils.gitfs.PYGIT2_VERSION + and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER + and salt.utils.gitfs.LIBGIT2_VERSION + and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER + ) +except AttributeError: + HAS_PYGIT2 = False + + +if HAS_PYGIT2: + import pygit2 + + +def test_provider_case_insensitive_gtfs_provider(): + """ + Ensure that both lowercase and non-lowercase values are supported + """ + opts = {"cachedir": "/tmp/gitfs-test-cache"} + provider = "GitPython" + for role_name, role_class in ( + ("gitfs", salt.utils.gitfs.GitFS), + ("git_pillar", salt.utils.gitfs.GitPillar), + ("winrepo", salt.utils.gitfs.WinRepo), + ): + + key = "{}_provider".format(role_name) + with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)): + with patch.object( + role_class, "verify_pygit2", MagicMock(return_value=False) + ): 
+ args = [opts, {}] + kwargs = {"init_remotes": False} + if role_name == "winrepo": + kwargs["cache_root"] = "/tmp/winrepo-dir" + with patch.dict(opts, {key: provider}): + # Try to create an instance with uppercase letters in + # provider name. If it fails then a + # FileserverConfigError will be raised, so no assert is + # necessary. + role_class(*args, **kwargs) + # Now try to instantiate an instance with all lowercase + # letters. Again, no need for an assert here. + role_class(*args, **kwargs) + + +def test_valid_provider_gtfs_provider(): + """ + Ensure that an invalid provider is not accepted, raising a + FileserverConfigError. + """ + opts = {"cachedir": "/tmp/gitfs-test-cache"} + + def _get_mock(verify, provider): + """ + Return a MagicMock with the desired return value + """ + return MagicMock(return_value=verify.endswith(provider)) + + for role_name, role_class in ( + ("gitfs", salt.utils.gitfs.GitFS), + ("git_pillar", salt.utils.gitfs.GitPillar), + ("winrepo", salt.utils.gitfs.WinRepo), + ): + key = "{}_provider".format(role_name) + for provider in salt.utils.gitfs.GIT_PROVIDERS: + verify = "verify_gitpython" + mock1 = _get_mock(verify, provider) + with patch.object(role_class, verify, mock1): + verify = "verify_pygit2" + mock2 = _get_mock(verify, provider) + with patch.object(role_class, verify, mock2): + args = [opts, {}] + kwargs = {"init_remotes": False} + if role_name == "winrepo": + kwargs["cache_root"] = "/tmp/winrepo-dir" + + with patch.dict(opts, {key: provider}): + role_class(*args, **kwargs) + + with patch.dict(opts, {key: "foo"}): + # Set the provider name to a known invalid provider + # and make sure it raises an exception. 
+ with pytest.raises(FileserverConfigError): + role_class(*args, **kwargs) + + +def _prepare_remote_repository_pygit2(path): + shutil.rmtree(path, ignore_errors=True) + + filecontent = "This is an empty README file" + filename = "README" + + signature = pygit2.Signature("Dummy Commiter", "dummy@dummy.com", int(time()), 0) + + repository = pygit2.init_repository(path, False) + builder = repository.TreeBuilder() + tree = builder.write() + commit = repository.create_commit( + "HEAD", signature, signature, "Create master branch", tree, [] + ) + repository.create_reference("refs/tags/simple_tag", commit) + + with salt.utils.files.fopen( + os.path.join(repository.workdir, filename), "w" + ) as file: + file.write(filecontent) + + blob = repository.create_blob_fromworkdir(filename) + builder = repository.TreeBuilder() + builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB) + tree = builder.write() + + repository.index.read() + repository.index.add(filename) + repository.index.write() + + commit = repository.create_commit( + "HEAD", + signature, + signature, + "Added a README", + tree, + [repository.head.target], + ) + repository.create_tag( + "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message" + ) + + +def _prepare_cache_repository_pygit2(remote, cache): + opts = { + "cachedir": cache, + "__role": "minion", + "gitfs_disable_saltenv_mapping": False, + "gitfs_base": "master", + "gitfs_insecure_auth": False, + "gitfs_mountpoint": "", + "gitfs_passphrase": "", + "gitfs_password": "", + "gitfs_privkey": "", + "gitfs_provider": "pygit2", + "gitfs_pubkey": "", + "gitfs_ref_types": ["branch", "tag", "sha"], + "gitfs_refspecs": [ + "+refs/heads/*:refs/remotes/origin/*", + "+refs/tags/*:refs/tags/*", + ], + "gitfs_root": "", + "gitfs_saltenv_blacklist": [], + "gitfs_saltenv_whitelist": [], + "gitfs_ssl_verify": True, + "gitfs_update_interval": 3, + "gitfs_user": "", + "verified_gitfs_provider": "pygit2", + } + per_remote_defaults = { + "base": "master", + 
"disable_saltenv_mapping": False, + "insecure_auth": False, + "ref_types": ["branch", "tag", "sha"], + "passphrase": "", + "mountpoint": "", + "password": "", + "privkey": "", + "pubkey": "", + "refspecs": [ + "+refs/heads/*:refs/remotes/origin/*", + "+refs/tags/*:refs/tags/*", + ], + "root": "", + "saltenv_blacklist": [], + "saltenv_whitelist": [], + "ssl_verify": True, + "update_interval": 60, + "user": "", + } + per_remote_only = ("all_saltenvs", "name", "saltenv") + override_params = tuple(per_remote_defaults.keys()) + cache_root = os.path.join(cache, "gitfs") + role = "gitfs" + shutil.rmtree(cache_root, ignore_errors=True) + provider = salt.utils.gitfs.Pygit2( + opts, + remote, + per_remote_defaults, + per_remote_only, + override_params, + cache_root, + role, + ) + return provider + + +@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") +@pytest.mark.skip_on_windows( + reason="Skip Pygit2 on windows, due to pygit2 access error on windows" +) +def test_checkout_pygit2(): + remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") + cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") + _prepare_remote_repository_pygit2(remote) + provider = _prepare_cache_repository_pygit2(remote, cache) + provider.remotecallbacks = None + provider.credentials = None + provider.init_remote() + provider.fetch() + provider.branch = "master" + assert provider.cachedir in provider.checkout() + provider.branch = "simple_tag" + assert provider.cachedir in provider.checkout() + provider.branch = "annotated_tag" + assert provider.cachedir in provider.checkout() + provider.branch = "does_not_exist" + assert provider.checkout() is None + + +@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") +@pytest.mark.skip_on_windows( + reason="Skip Pygit2 on windows, due to pygit2 access error on windows" +) +def test_full_id_pygit2(): + remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") + cache = 
os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") + _prepare_remote_repository_pygit2(remote) + provider = _prepare_cache_repository_pygit2(remote, cache) + assert provider.full_id() == "-/tmp/salt-tests-tmpdir/pygit2-repo--" + + +@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") +@pytest.mark.skip_on_windows( + reason="Skip Pygit2 on windows, due to pygit2 access error on windows" +) +def test_get_cachedir_basename_pygit2(): + remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") + cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") + _prepare_remote_repository_pygit2(remote) + provider = _prepare_cache_repository_pygit2(remote, cache) + # Note: changing full id or the hash type will change this output + assert provider.get_cachedir_basename() == "-f2921dbe1e0a05111ef51c6dea256a47" diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py index b99da3ef916..050686f38d6 100644 --- a/tests/unit/utils/test_gitfs.py +++ b/tests/unit/utils/test_gitfs.py @@ -2,36 +2,20 @@ These only test the provider selection and verification logic, they do not init any remotes. 
""" -import os -import shutil -from time import time + +import tempfile import pytest +import salt.ext.tornado.ioloop import salt.fileserver.gitfs import salt.utils.files import salt.utils.gitfs +import salt.utils.path import salt.utils.platform -import tests.support.paths -from salt.exceptions import FileserverConfigError from tests.support.mixins import AdaptedConfigurationTestCaseMixin -from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase -try: - HAS_PYGIT2 = ( - salt.utils.gitfs.PYGIT2_VERSION - and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER - and salt.utils.gitfs.LIBGIT2_VERSION - and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER - ) -except AttributeError: - HAS_PYGIT2 = False - - -if HAS_PYGIT2: - import pygit2 - def _clear_instance_map(): try: @@ -44,6 +28,9 @@ def _clear_instance_map(): class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): def setUp(self): + self._tmp_dir = tempfile.TemporaryDirectory() + tmp_name = self._tmp_dir.name + class MockedProvider( salt.utils.gitfs.GitProvider ): # pylint: disable=abstract-method @@ -70,6 +57,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): ) def init_remote(self): + self.gitdir = salt.utils.path.join(tmp_name, ".git") self.repo = True new = False return new @@ -106,6 +94,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): for remote in self.main_class.remotes: remote.fetched = False del self.main_class + self._tmp_dir.cleanup() def test_update_all(self): self.main_class.update() @@ -125,213 +114,70 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): self.assertTrue(self.main_class.remotes[0].fetched) self.assertFalse(self.main_class.remotes[1].fetched) + def test_full_id(self): + self.assertEqual(self.main_class.remotes[0].full_id(), "-file://repo1.git--") -class TestGitFSProvider(TestCase): - def setUp(self): - self.opts = {"cachedir": "/tmp/gitfs-test-cache"} + def 
test_full_id_with_name(self): + self.assertEqual( + self.main_class.remotes[1].full_id(), "repo2-file://repo2.git--" + ) - def tearDown(self): - self.opts = None + def test_get_cachedir_basename(self): + self.assertEqual( + self.main_class.remotes[0].get_cachedir_basename(), + "-b4dcbd51b08742ec23eaf96d192d29b417ec137ea7ca0c0de2515cfaf6e26860", + ) - def test_provider_case_insensitive(self): + def test_get_cachedir_base_with_name(self): + self.assertEqual( + self.main_class.remotes[1].get_cachedir_basename(), + "repo2-4218c2f8e303c6ea24cc541d8748e523d5b443c3050170a43a1a00be253b56aa", + ) + + def test_git_provider_mp_lock(self): """ - Ensure that both lowercase and non-lowercase values are supported + Check that lock is released after provider.lock() """ - provider = "GitPython" - for role_name, role_class in ( - ("gitfs", salt.utils.gitfs.GitFS), - ("git_pillar", salt.utils.gitfs.GitPillar), - ("winrepo", salt.utils.gitfs.WinRepo), - ): + provider = self.main_class.remotes[0] + provider.lock() + # check that lock has been released + self.assertTrue(provider._master_lock.acquire(timeout=5)) + provider._master_lock.release() - key = "{}_provider".format(role_name) - with patch.object( - role_class, "verify_gitpython", MagicMock(return_value=True) - ): - with patch.object( - role_class, "verify_pygit2", MagicMock(return_value=False) - ): - args = [self.opts, {}] - kwargs = {"init_remotes": False} - if role_name == "winrepo": - kwargs["cache_root"] = "/tmp/winrepo-dir" - with patch.dict(self.opts, {key: provider}): - # Try to create an instance with uppercase letters in - # provider name. If it fails then a - # FileserverConfigError will be raised, so no assert is - # necessary. - role_class(*args, **kwargs) - # Now try to instantiate an instance with all lowercase - # letters. Again, no need for an assert here. 
- role_class(*args, **kwargs) - - def test_valid_provider(self): + def test_git_provider_mp_clear_lock(self): """ - Ensure that an invalid provider is not accepted, raising a - FileserverConfigError. + Check that lock is released after provider.clear_lock() """ + provider = self.main_class.remotes[0] + provider.clear_lock() + # check that lock has been released + self.assertTrue(provider._master_lock.acquire(timeout=5)) + provider._master_lock.release() - def _get_mock(verify, provider): - """ - Return a MagicMock with the desired return value - """ - return MagicMock(return_value=verify.endswith(provider)) + @pytest.mark.slow_test + def test_git_provider_mp_lock_timeout(self): + """ + Check that lock will time out if master lock is locked. + """ + provider = self.main_class.remotes[0] + # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. + self.assertTrue(provider._master_lock.acquire(timeout=5)) + try: + # git provider should raise timeout error to avoid lock race conditions + self.assertRaises(TimeoutError, provider.lock) + finally: + provider._master_lock.release() - for role_name, role_class in ( - ("gitfs", salt.utils.gitfs.GitFS), - ("git_pillar", salt.utils.gitfs.GitPillar), - ("winrepo", salt.utils.gitfs.WinRepo), - ): - key = "{}_provider".format(role_name) - for provider in salt.utils.gitfs.GIT_PROVIDERS: - verify = "verify_gitpython" - mock1 = _get_mock(verify, provider) - with patch.object(role_class, verify, mock1): - verify = "verify_pygit2" - mock2 = _get_mock(verify, provider) - with patch.object(role_class, verify, mock2): - args = [self.opts, {}] - kwargs = {"init_remotes": False} - if role_name == "winrepo": - kwargs["cache_root"] = "/tmp/winrepo-dir" - - with patch.dict(self.opts, {key: provider}): - role_class(*args, **kwargs) - - with patch.dict(self.opts, {key: "foo"}): - # Set the provider name to a known invalid provider - # and make sure it raises an exception. 
- self.assertRaises( - FileserverConfigError, role_class, *args, **kwargs - ) - - -@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") -@pytest.mark.skip_on_windows( - reason="Skip Pygit2 on windows, due to pygit2 access error on windows" -) -class TestPygit2(TestCase): - def _prepare_remote_repository(self, path): - shutil.rmtree(path, ignore_errors=True) - - filecontent = "This is an empty README file" - filename = "README" - - signature = pygit2.Signature( - "Dummy Commiter", "dummy@dummy.com", int(time()), 0 - ) - - repository = pygit2.init_repository(path, False) - builder = repository.TreeBuilder() - tree = builder.write() - commit = repository.create_commit( - "HEAD", signature, signature, "Create master branch", tree, [] - ) - repository.create_reference("refs/tags/simple_tag", commit) - - with salt.utils.files.fopen( - os.path.join(repository.workdir, filename), "w" - ) as file: - file.write(filecontent) - - blob = repository.create_blob_fromworkdir(filename) - builder = repository.TreeBuilder() - builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB) - tree = builder.write() - - repository.index.read() - repository.index.add(filename) - repository.index.write() - - commit = repository.create_commit( - "HEAD", - signature, - signature, - "Added a README", - tree, - [repository.head.target], - ) - repository.create_tag( - "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message" - ) - - def _prepare_cache_repository(self, remote, cache): - opts = { - "cachedir": cache, - "__role": "minion", - "gitfs_disable_saltenv_mapping": False, - "gitfs_base": "master", - "gitfs_insecure_auth": False, - "gitfs_mountpoint": "", - "gitfs_passphrase": "", - "gitfs_password": "", - "gitfs_privkey": "", - "gitfs_provider": "pygit2", - "gitfs_pubkey": "", - "gitfs_ref_types": ["branch", "tag", "sha"], - "gitfs_refspecs": [ - "+refs/heads/*:refs/remotes/origin/*", - "+refs/tags/*:refs/tags/*", - ], - "gitfs_root": "", - 
"gitfs_saltenv_blacklist": [], - "gitfs_saltenv_whitelist": [], - "gitfs_ssl_verify": True, - "gitfs_update_interval": 3, - "gitfs_user": "", - "verified_gitfs_provider": "pygit2", - } - per_remote_defaults = { - "base": "master", - "disable_saltenv_mapping": False, - "insecure_auth": False, - "ref_types": ["branch", "tag", "sha"], - "passphrase": "", - "mountpoint": "", - "password": "", - "privkey": "", - "pubkey": "", - "refspecs": [ - "+refs/heads/*:refs/remotes/origin/*", - "+refs/tags/*:refs/tags/*", - ], - "root": "", - "saltenv_blacklist": [], - "saltenv_whitelist": [], - "ssl_verify": True, - "update_interval": 60, - "user": "", - } - per_remote_only = ("all_saltenvs", "name", "saltenv") - override_params = tuple(per_remote_defaults.keys()) - cache_root = os.path.join(cache, "gitfs") - role = "gitfs" - shutil.rmtree(cache_root, ignore_errors=True) - provider = salt.utils.gitfs.Pygit2( - opts, - remote, - per_remote_defaults, - per_remote_only, - override_params, - cache_root, - role, - ) - return provider - - def test_checkout(self): - remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") - cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") - self._prepare_remote_repository(remote) - provider = self._prepare_cache_repository(remote, cache) - provider.remotecallbacks = None - provider.credentials = None - provider.init_remote() - provider.fetch() - provider.branch = "master" - self.assertIn(provider.cachedir, provider.checkout()) - provider.branch = "simple_tag" - self.assertIn(provider.cachedir, provider.checkout()) - provider.branch = "annotated_tag" - self.assertIn(provider.cachedir, provider.checkout()) - provider.branch = "does_not_exist" - self.assertIsNone(provider.checkout()) + @pytest.mark.slow_test + def test_git_provider_mp_clear_lock_timeout(self): + """ + Check that clear lock will time out if master lock is locked. 
+ """ + provider = self.main_class.remotes[0] + # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. + self.assertTrue(provider._master_lock.acquire(timeout=5)) + try: + # git provider should raise timeout error to avoid lock race conditions + self.assertRaises(TimeoutError, provider.clear_lock) + finally: + provider._master_lock.release() From 7d2ad0d722d901579a3b0a26de75dafcf7c42137 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Mon, 24 Jul 2023 14:12:20 -0500 Subject: [PATCH 27/73] Fix gitfs tests --- tests/pytests/unit/utils/test_gitfs.py | 220 ++++++++++++------------- 1 file changed, 108 insertions(+), 112 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index 5395810d5ed..a5bed3d8dbf 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -1,12 +1,11 @@ import os -import shutil +import string from time import time import pytest import salt.fileserver.gitfs import salt.utils.gitfs -import tests.support.paths from salt.exceptions import FileserverConfigError from tests.support.mock import MagicMock, patch @@ -25,44 +24,58 @@ if HAS_PYGIT2: import pygit2 -def test_provider_case_insensitive_gtfs_provider(): - """ - Ensure that both lowercase and non-lowercase values are supported - """ - opts = {"cachedir": "/tmp/gitfs-test-cache"} - provider = "GitPython" - for role_name, role_class in ( +@pytest.mark.parametrize( + "role_name,role_class", + ( ("gitfs", salt.utils.gitfs.GitFS), ("git_pillar", salt.utils.gitfs.GitPillar), ("winrepo", salt.utils.gitfs.WinRepo), - ): - - key = "{}_provider".format(role_name) - with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)): - with patch.object( - role_class, "verify_pygit2", MagicMock(return_value=False) - ): - args = [opts, {}] - kwargs = {"init_remotes": False} - if role_name == "winrepo": - kwargs["cache_root"] = "/tmp/winrepo-dir" - with patch.dict(opts, 
{key: provider}): - # Try to create an instance with uppercase letters in - # provider name. If it fails then a - # FileserverConfigError will be raised, so no assert is - # necessary. - role_class(*args, **kwargs) - # Now try to instantiate an instance with all lowercase - # letters. Again, no need for an assert here. + ), +) +def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_class): + """ + Ensure that both lowercase and non-lowercase values are supported + """ + provider = "GitPython" + key = "{}_provider".format(role_name) + with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)): + with patch.object(role_class, "verify_pygit2", MagicMock(return_value=False)): + args = [minion_opts, {}] + kwargs = {"init_remotes": False} + if role_name == "winrepo": + kwargs["cache_root"] = "/tmp/winrepo-dir" + with patch.dict(minion_opts, {key: provider}): + # Try to create an instance with uppercase letters in + # provider name. If it fails then a + # FileserverConfigError will be raised, so no assert is + # necessary. role_class(*args, **kwargs) + # Now try to instantiate an instance with all lowercase + # letters. Again, no need for an assert here. + role_class(*args, **kwargs) + pytest.mark.parametrize( + "role_name,role_class", + ( + ("gitfs", salt.utils.gitfs.GitFS), + ("git_pillar", salt.utils.gitfs.GitPillar), + ("winrepo", salt.utils.gitfs.WinRepo), + ), + ) -def test_valid_provider_gtfs_provider(): +@pytest.mark.parametrize( + "role_name,role_class", + ( + ("gitfs", salt.utils.gitfs.GitFS), + ("git_pillar", salt.utils.gitfs.GitPillar), + ("winrepo", salt.utils.gitfs.WinRepo), + ), +) +def test_valid_provider_gitfs_provider(minion_opts, role_name, role_class): """ Ensure that an invalid provider is not accepted, raising a FileserverConfigError. 
""" - opts = {"cachedir": "/tmp/gitfs-test-cache"} def _get_mock(verify, provider): """ @@ -70,64 +83,51 @@ def test_valid_provider_gtfs_provider(): """ return MagicMock(return_value=verify.endswith(provider)) - for role_name, role_class in ( - ("gitfs", salt.utils.gitfs.GitFS), - ("git_pillar", salt.utils.gitfs.GitPillar), - ("winrepo", salt.utils.gitfs.WinRepo), - ): - key = "{}_provider".format(role_name) - for provider in salt.utils.gitfs.GIT_PROVIDERS: - verify = "verify_gitpython" - mock1 = _get_mock(verify, provider) - with patch.object(role_class, verify, mock1): - verify = "verify_pygit2" - mock2 = _get_mock(verify, provider) - with patch.object(role_class, verify, mock2): - args = [opts, {}] - kwargs = {"init_remotes": False} - if role_name == "winrepo": - kwargs["cache_root"] = "/tmp/winrepo-dir" - - with patch.dict(opts, {key: provider}): + key = "{}_provider".format(role_name) + for provider in salt.utils.gitfs.GIT_PROVIDERS: + verify = "verify_gitpython" + mock1 = _get_mock(verify, provider) + with patch.object(role_class, verify, mock1): + verify = "verify_pygit2" + mock2 = _get_mock(verify, provider) + with patch.object(role_class, verify, mock2): + args = [minion_opts, {}] + kwargs = {"init_remotes": False} + if role_name == "winrepo": + kwargs["cache_root"] = "/tmp/winrepo-dir" + with patch.dict(minion_opts, {key: provider}): + role_class(*args, **kwargs) + with patch.dict(minion_opts, {key: "foo"}): + # Set the provider name to a known invalid provider + # and make sure it raises an exception. + with pytest.raises(FileserverConfigError): role_class(*args, **kwargs) - with patch.dict(opts, {key: "foo"}): - # Set the provider name to a known invalid provider - # and make sure it raises an exception. 
- with pytest.raises(FileserverConfigError): - role_class(*args, **kwargs) - - -def _prepare_remote_repository_pygit2(path): - shutil.rmtree(path, ignore_errors=True) +@pytest.fixture +def _prepare_remote_repository_pygit2(tmp_path): + remote = os.path.join(tmp_path, "pygit2-repo") filecontent = "This is an empty README file" filename = "README" - signature = pygit2.Signature("Dummy Commiter", "dummy@dummy.com", int(time()), 0) - - repository = pygit2.init_repository(path, False) + repository = pygit2.init_repository(remote, False) builder = repository.TreeBuilder() tree = builder.write() commit = repository.create_commit( "HEAD", signature, signature, "Create master branch", tree, [] ) repository.create_reference("refs/tags/simple_tag", commit) - with salt.utils.files.fopen( os.path.join(repository.workdir, filename), "w" ) as file: file.write(filecontent) - blob = repository.create_blob_fromworkdir(filename) builder = repository.TreeBuilder() builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB) tree = builder.write() - repository.index.read() repository.index.add(filename) repository.index.write() - commit = repository.create_commit( "HEAD", signature, @@ -139,34 +139,38 @@ def _prepare_remote_repository_pygit2(path): repository.create_tag( "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message" ) + return remote -def _prepare_cache_repository_pygit2(remote, cache): - opts = { - "cachedir": cache, - "__role": "minion", - "gitfs_disable_saltenv_mapping": False, - "gitfs_base": "master", - "gitfs_insecure_auth": False, - "gitfs_mountpoint": "", - "gitfs_passphrase": "", - "gitfs_password": "", - "gitfs_privkey": "", - "gitfs_provider": "pygit2", - "gitfs_pubkey": "", - "gitfs_ref_types": ["branch", "tag", "sha"], - "gitfs_refspecs": [ - "+refs/heads/*:refs/remotes/origin/*", - "+refs/tags/*:refs/tags/*", - ], - "gitfs_root": "", - "gitfs_saltenv_blacklist": [], - "gitfs_saltenv_whitelist": [], - "gitfs_ssl_verify": True, - 
"gitfs_update_interval": 3, - "gitfs_user": "", - "verified_gitfs_provider": "pygit2", - } +@pytest.fixture +def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2): + cache = os.path.join(tmp_path, "pygit2-repo-cache") + minion_opts.update( + { + "cachedir": cache, + "gitfs_disable_saltenv_mapping": False, + "gitfs_base": "master", + "gitfs_insecure_auth": False, + "gitfs_mountpoint": "", + "gitfs_passphrase": "", + "gitfs_password": "", + "gitfs_privkey": "", + "gitfs_provider": "pygit2", + "gitfs_pubkey": "", + "gitfs_ref_types": ["branch", "tag", "sha"], + "gitfs_refspecs": [ + "+refs/heads/*:refs/remotes/origin/*", + "+refs/tags/*:refs/tags/*", + ], + "gitfs_root": "", + "gitfs_saltenv_blacklist": [], + "gitfs_saltenv_whitelist": [], + "gitfs_ssl_verify": True, + "gitfs_update_interval": 3, + "gitfs_user": "", + "verified_gitfs_provider": "pygit2", + } + ) per_remote_defaults = { "base": "master", "disable_saltenv_mapping": False, @@ -192,10 +196,9 @@ def _prepare_cache_repository_pygit2(remote, cache): override_params = tuple(per_remote_defaults.keys()) cache_root = os.path.join(cache, "gitfs") role = "gitfs" - shutil.rmtree(cache_root, ignore_errors=True) provider = salt.utils.gitfs.Pygit2( - opts, - remote, + minion_opts, + _prepare_remote_repository_pygit2, per_remote_defaults, per_remote_only, override_params, @@ -209,11 +212,8 @@ def _prepare_cache_repository_pygit2(remote, cache): @pytest.mark.skip_on_windows( reason="Skip Pygit2 on windows, due to pygit2 access error on windows" ) -def test_checkout_pygit2(): - remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") - cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") - _prepare_remote_repository_pygit2(remote) - provider = _prepare_cache_repository_pygit2(remote, cache) +def test_checkout_pygit2(_prepare_provider): + provider = _prepare_provider provider.remotecallbacks = None provider.credentials = None provider.init_remote() @@ -232,22 +232,18 @@ def 
test_checkout_pygit2(): @pytest.mark.skip_on_windows( reason="Skip Pygit2 on windows, due to pygit2 access error on windows" ) -def test_full_id_pygit2(): - remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") - cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") - _prepare_remote_repository_pygit2(remote) - provider = _prepare_cache_repository_pygit2(remote, cache) - assert provider.full_id() == "-/tmp/salt-tests-tmpdir/pygit2-repo--" +def test_full_id_pygit2(_prepare_provider): + assert _prepare_provider.full_id().startswith("-") + assert _prepare_provider.full_id().endswith("/pygit2-repo--") @pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") @pytest.mark.skip_on_windows( reason="Skip Pygit2 on windows, due to pygit2 access error on windows" ) -def test_get_cachedir_basename_pygit2(): - remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") - cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") - _prepare_remote_repository_pygit2(remote) - provider = _prepare_cache_repository_pygit2(remote, cache) - # Note: changing full id or the hash type will change this output - assert provider.get_cachedir_basename() == "-f2921dbe1e0a05111ef51c6dea256a47" +def test_get_cachedir_basename_pygit2(_prepare_provider): + basename = _prepare_provider.get_cachedir_basename() + assert len(basename) > 1 + assert basename[0] == "-" + # check that a valid hex is given + assert all(c in string.hexdigits for c in basename[1:]) From 5a3b8eb35f2bc718f7fd80385e991906de4348b3 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 27 Jul 2023 07:33:02 +0100 Subject: [PATCH 28/73] It's `gitfs` not `gtfs`, plus some code fixes and cleanup Signed-off-by: Pedro Algarvio --- changelog/cve-2023-20898.security.md | 2 +- salt/utils/gitfs.py | 14 +++++++------- tests/pytests/unit/utils/test_gitfs.py | 24 +++++++++--------------- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git 
a/changelog/cve-2023-20898.security.md b/changelog/cve-2023-20898.security.md index 295e9d27bf9..44f1729192d 100644 --- a/changelog/cve-2023-20898.security.md +++ b/changelog/cve-2023-20898.security.md @@ -1 +1 @@ -Fixed gtfs cachedir_basename to avoid hash collisions. Added MP Lock to gtfs. These changes should stop race conditions. +Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 84e124dc8ab..8ff21e23586 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -457,7 +457,7 @@ class GitProvider: hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) # Generate full id. The full id is made from these parts name-id-env-_root. - # Full id stops collections in the gtfs cache. + # Full id stops collections in the gitfs cache. self._full_id = "-".join( [ getattr(self, "name", ""), @@ -683,9 +683,9 @@ class GitProvider: Clear update.lk """ if self.__class__._master_lock.acquire(timeout=60) is False: - # if gtfs works right we should never see this timeout error. - log.error("gtfs master lock timeout!") - raise TimeoutError("gtfs master lock timeout!") + # if gitfs works right we should never see this timeout error. + log.error("gitfs master lock timeout!") + raise TimeoutError("gitfs master lock timeout!") try: return self._clear_lock(lock_type) finally: @@ -872,9 +872,9 @@ class GitProvider: Without MultiProcessing locks. """ if self.__class__._master_lock.acquire(timeout=60) is False: - # if gtfs works right we should never see this timeout error. - log.error("gtfs master lock timeout!") - raise TimeoutError("gtfs master lock timeout!") + # if gitfs works right we should never see this timeout error. 
+ log.error("gitfs master lock timeout!") + raise TimeoutError("gitfs master lock timeout!") try: return self.__lock(lock_type, failhard) finally: diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index a5bed3d8dbf..1367119ff4d 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -1,6 +1,6 @@ import os import string -from time import time +import time import pytest @@ -53,14 +53,6 @@ def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_c # Now try to instantiate an instance with all lowercase # letters. Again, no need for an assert here. role_class(*args, **kwargs) - pytest.mark.parametrize( - "role_name,role_class", - ( - ("gitfs", salt.utils.gitfs.GitFS), - ("git_pillar", salt.utils.gitfs.GitPillar), - ("winrepo", salt.utils.gitfs.WinRepo), - ), - ) @pytest.mark.parametrize( @@ -109,7 +101,9 @@ def _prepare_remote_repository_pygit2(tmp_path): remote = os.path.join(tmp_path, "pygit2-repo") filecontent = "This is an empty README file" filename = "README" - signature = pygit2.Signature("Dummy Commiter", "dummy@dummy.com", int(time()), 0) + signature = pygit2.Signature( + "Dummy Commiter", "dummy@dummy.com", int(time.time()), 0 + ) repository = pygit2.init_repository(remote, False) builder = repository.TreeBuilder() tree = builder.write() @@ -144,10 +138,10 @@ def _prepare_remote_repository_pygit2(tmp_path): @pytest.fixture def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2): - cache = os.path.join(tmp_path, "pygit2-repo-cache") + cache = tmp_path / "pygit2-repo-cache" minion_opts.update( { - "cachedir": cache, + "cachedir": str(cache), "gitfs_disable_saltenv_mapping": False, "gitfs_base": "master", "gitfs_insecure_auth": False, @@ -193,8 +187,8 @@ def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2): "user": "", } per_remote_only = ("all_saltenvs", "name", "saltenv") - override_params = 
tuple(per_remote_defaults.keys()) - cache_root = os.path.join(cache, "gitfs") + override_params = tuple(per_remote_defaults) + cache_root = cache / "gitfs" role = "gitfs" provider = salt.utils.gitfs.Pygit2( minion_opts, @@ -202,7 +196,7 @@ def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2): per_remote_defaults, per_remote_only, override_params, - cache_root, + str(cache_root), role, ) return provider From 5f5bfbd256176dd215341148ebd8e6bd9cff74bf Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Mon, 31 Jul 2023 12:19:14 -0500 Subject: [PATCH 29/73] update tests --- tests/pytests/unit/utils/test_gitfs.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index 1367119ff4d..e3cb613b390 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -4,6 +4,7 @@ import time import pytest +import salt.config import salt.fileserver.gitfs import salt.utils.gitfs from salt.exceptions import FileserverConfigError @@ -24,6 +25,23 @@ if HAS_PYGIT2: import pygit2 +@pytest.fixture +def minion_opts(tmp_path): + """ + Default minion configuration with relative temporary paths to not require root permissions. 
+ """ + root_dir = tmp_path / "minion" + opts = salt.config.DEFAULT_MINION_OPTS.copy() + opts["__role"] = "minion" + opts["root_dir"] = str(root_dir) + for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"): + dirpath = root_dir / name + dirpath.mkdir(parents=True) + opts[name] = str(dirpath) + opts["log_file"] = "logs/minion.log" + return opts + + @pytest.mark.parametrize( "role_name,role_class", ( From b2871f5ee5e71a7c9276746d80209b56503f25ff Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Mon, 31 Jul 2023 12:54:44 -0500 Subject: [PATCH 30/73] fix doc --- salt/utils/gitfs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 8ff21e23586..aaaa8ebeb57 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -869,7 +869,6 @@ class GitProvider: def _lock(self, lock_type="update", failhard=False): """ Place a lock file if (and only if) it does not already exist. - Without MultiProcessing locks. """ if self.__class__._master_lock.acquire(timeout=60) is False: # if gitfs works right we should never see this timeout error. @@ -883,6 +882,7 @@ class GitProvider: def __lock(self, lock_type="update", failhard=False): """ Place a lock file if (and only if) it does not already exist. + Without MultiProcessing locks. 
""" try: fh_ = os.open( From 9fc3d7601fa4f9d8f7d581387a22366a0186c94f Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Tue, 1 Aug 2023 10:05:22 -0500 Subject: [PATCH 31/73] wip --- salt/runners/winrepo.py | 35 +++++++++++++++-------------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/salt/runners/winrepo.py b/salt/runners/winrepo.py index 9e31040884c..23a15834bae 100644 --- a/salt/runners/winrepo.py +++ b/salt/runners/winrepo.py @@ -224,25 +224,20 @@ def update_git_repos(opts=None, clean=False, masterless=False): ret.update(winrepo_result) else: # New winrepo code utilizing salt.utils.gitfs - try: - winrepo = salt.utils.gitfs.WinRepo( - opts, - remotes, - per_remote_overrides=PER_REMOTE_OVERRIDES, - per_remote_only=PER_REMOTE_ONLY, - global_only=GLOBAL_ONLY, - cache_root=base_dir, - ) - winrepo.fetch_remotes() - # Since we're not running update(), we need to manually call - # clear_old_remotes() to remove directories from remotes that - # have been removed from configuration. - if clean: - winrepo.clear_old_remotes() - winrepo.checkout() - except Exception as exc: # pylint: disable=broad-except - msg = "Failed to update winrepo_remotes: {}".format(exc) - log.error(msg, exc_info_on_loglevel=logging.DEBUG) - return msg + winrepo = salt.utils.gitfs.WinRepo( + opts, + remotes, + per_remote_overrides=PER_REMOTE_OVERRIDES, + per_remote_only=PER_REMOTE_ONLY, + global_only=GLOBAL_ONLY, + cache_root=base_dir, + ) + winrepo.fetch_remotes() + # Since we're not running update(), we need to manually call + # clear_old_remotes() to remove directories from remotes that + # have been removed from configuration. 
+ if clean: + winrepo.clear_old_remotes() + winrepo.checkout() ret.update(winrepo.winrepo_dirs) return ret From 254da5617cb4aa47bfcf379f932cd4c8fc92abb4 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Tue, 1 Aug 2023 12:18:27 -0500 Subject: [PATCH 32/73] rwip --- salt/runners/winrepo.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/salt/runners/winrepo.py b/salt/runners/winrepo.py index 23a15834bae..9e31040884c 100644 --- a/salt/runners/winrepo.py +++ b/salt/runners/winrepo.py @@ -224,20 +224,25 @@ def update_git_repos(opts=None, clean=False, masterless=False): ret.update(winrepo_result) else: # New winrepo code utilizing salt.utils.gitfs - winrepo = salt.utils.gitfs.WinRepo( - opts, - remotes, - per_remote_overrides=PER_REMOTE_OVERRIDES, - per_remote_only=PER_REMOTE_ONLY, - global_only=GLOBAL_ONLY, - cache_root=base_dir, - ) - winrepo.fetch_remotes() - # Since we're not running update(), we need to manually call - # clear_old_remotes() to remove directories from remotes that - # have been removed from configuration. - if clean: - winrepo.clear_old_remotes() - winrepo.checkout() + try: + winrepo = salt.utils.gitfs.WinRepo( + opts, + remotes, + per_remote_overrides=PER_REMOTE_OVERRIDES, + per_remote_only=PER_REMOTE_ONLY, + global_only=GLOBAL_ONLY, + cache_root=base_dir, + ) + winrepo.fetch_remotes() + # Since we're not running update(), we need to manually call + # clear_old_remotes() to remove directories from remotes that + # have been removed from configuration. 
+ if clean: + winrepo.clear_old_remotes() + winrepo.checkout() + except Exception as exc: # pylint: disable=broad-except + msg = "Failed to update winrepo_remotes: {}".format(exc) + log.error(msg, exc_info_on_loglevel=logging.DEBUG) + return msg ret.update(winrepo.winrepo_dirs) return ret From 15462bd399c9518ba282287085ecf8bea5b23be8 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Tue, 1 Aug 2023 18:54:39 -0500 Subject: [PATCH 33/73] wrap sha in base64 --- salt/utils/gitfs.py | 13 ++++++++++--- tests/pytests/unit/utils/test_gitfs.py | 6 +++--- tests/unit/utils/test_gitfs.py | 4 ++-- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index aaaa8ebeb57..9c20b822229 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -3,6 +3,7 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo """ +import base64 import contextlib import copy import errno @@ -457,7 +458,7 @@ class GitProvider: hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) # Generate full id. The full id is made from these parts name-id-env-_root. - # Full id stops collections in the gitfs cache. + # Full id helps decrease the chances of collections in the gitfs cache. 
self._full_id = "-".join( [ getattr(self, "name", ""), @@ -468,10 +469,16 @@ class GitProvider: ) # We loaded this data from yaml configuration files, so, its safe # to use UTF-8 - self.cachedir_basename = f"{getattr(self, 'name', '')}-{hash_type(self._full_id.encode('utf-8')).hexdigest()}" + log.error(hash_type(self._full_id.encode("utf-8")).hexdigest()) + base64_hash = str( + base64.b64encode(hash_type(self._full_id.encode("utf-8")).digest()), + encoding="utf-8", + ).replace("/", "_") + # limit name length to 19, so we don't eat up all the path length for windows + # this is due to pygit2 limitations + self.cachedir_basename = f"{getattr(self, 'name', '')[:19]}-{base64_hash}" self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename) self.linkdir = salt.utils.path.join(cache_root, "links", self.cachedir_basename) - if not os.path.isdir(self.cachedir): os.makedirs(self.cachedir) diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index e3cb613b390..cc1a920f402 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -255,7 +255,7 @@ def test_full_id_pygit2(_prepare_provider): ) def test_get_cachedir_basename_pygit2(_prepare_provider): basename = _prepare_provider.get_cachedir_basename() - assert len(basename) > 1 + assert len(basename) == 45 assert basename[0] == "-" - # check that a valid hex is given - assert all(c in string.hexdigits for c in basename[1:]) + # check that a valid base64 is given '/' -> '_' + assert all(c in string.ascii_letters + string.digits + "+_=" for c in basename[1:]) diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py index 050686f38d6..8d76bf19e03 100644 --- a/tests/unit/utils/test_gitfs.py +++ b/tests/unit/utils/test_gitfs.py @@ -125,13 +125,13 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): def test_get_cachedir_basename(self): self.assertEqual( self.main_class.remotes[0].get_cachedir_basename(), 
- "-b4dcbd51b08742ec23eaf96d192d29b417ec137ea7ca0c0de2515cfaf6e26860", + "-tNy9UbCHQuwj6vltGS0ptBfsE36nygwN4lFc+vbiaGA=", ) def test_get_cachedir_base_with_name(self): self.assertEqual( self.main_class.remotes[1].get_cachedir_basename(), - "repo2-4218c2f8e303c6ea24cc541d8748e523d5b443c3050170a43a1a00be253b56aa", + "repo2-QhjC+OMDxuokzFQdh0jlI9W0Q8MFAXCkOhoAviU7Vqo=", ) def test_git_provider_mp_lock(self): From 4d35e14a97d877b14f5180d8e17007b27b73cd0e Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Tue, 1 Aug 2023 19:19:39 -0500 Subject: [PATCH 34/73] clean up cache name --- salt/utils/gitfs.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 9c20b822229..3a38fa382ef 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -17,6 +17,7 @@ import os import shlex import shutil import stat +import string import subprocess import time import weakref @@ -469,14 +470,22 @@ class GitProvider: ) # We loaded this data from yaml configuration files, so, its safe # to use UTF-8 - log.error(hash_type(self._full_id.encode("utf-8")).hexdigest()) base64_hash = str( base64.b64encode(hash_type(self._full_id.encode("utf-8")).digest()), - encoding="utf-8", - ).replace("/", "_") + encoding="ascii", # base64 only outputs ascii + ).replace( + "/", "_" + ) # replace "/" with "_" to not cause trouble with file system + # limit name length to 19, so we don't eat up all the path length for windows # this is due to pygit2 limitations - self.cachedir_basename = f"{getattr(self, 'name', '')[:19]}-{base64_hash}" + # replace any unknown char with "_" to not cause trouble with file system + name_chars = string.ascii_letters + string.digits + "-" + cache_name = "".join( + c if c in name_chars else "_" for c in getattr(self, "name", "")[:19] + ) + + self.cachedir_basename = f"{cache_name}-{base64_hash}" self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename) self.linkdir = 
salt.utils.path.join(cache_root, "links", self.cachedir_basename) if not os.path.isdir(self.cachedir): From 006fd8827bfd612b00c24d37a2884580c0f7fcc4 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Tue, 1 Aug 2023 19:25:09 -0500 Subject: [PATCH 35/73] add small note --- tests/pytests/unit/utils/test_gitfs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index cc1a920f402..92d4d08c05a 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -255,6 +255,8 @@ def test_full_id_pygit2(_prepare_provider): ) def test_get_cachedir_basename_pygit2(_prepare_provider): basename = _prepare_provider.get_cachedir_basename() + # Note: if you are changing the length of basename + # keep in mind that pygit2 will error out on large file paths on Windows assert len(basename) == 45 assert basename[0] == "-" # check that a valid base64 is given '/' -> '_' From ef229a36a2db41fbd9b009b45172f61af1cb74f8 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 2 Aug 2023 14:11:17 -0500 Subject: [PATCH 36/73] stop branch collision --- salt/utils/gitfs.py | 12 ++++++++++-- tests/pytests/unit/utils/test_gitfs.py | 2 +- tests/unit/utils/test_gitfs.py | 8 ++++---- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 3a38fa382ef..015d93f36da 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -458,14 +458,22 @@ class GitProvider: failhard(self.role) hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) - # Generate full id. The full id is made from these parts name-id-env-_root. + # Generate full id. # Full id helps decrease the chances of collections in the gitfs cache. 
+ try: + target = str(self.get_checkout_target()) + except AttributeError: + target = "" self._full_id = "-".join( [ getattr(self, "name", ""), - self.id.replace(" ", "-"), + self.id, getattr(self, "env", ""), getattr(self, "_root", ""), + self.role, + getattr(self, "base", ""), + getattr(self, "branch", ""), + target ] ) # We loaded this data from yaml configuration files, so, its safe diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index 92d4d08c05a..76c9409a1af 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -246,7 +246,7 @@ def test_checkout_pygit2(_prepare_provider): ) def test_full_id_pygit2(_prepare_provider): assert _prepare_provider.full_id().startswith("-") - assert _prepare_provider.full_id().endswith("/pygit2-repo--") + assert _prepare_provider.full_id().endswith("/pygit2-repo---gitfs-master--") @pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py index 8d76bf19e03..563599a5d56 100644 --- a/tests/unit/utils/test_gitfs.py +++ b/tests/unit/utils/test_gitfs.py @@ -115,23 +115,23 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): self.assertFalse(self.main_class.remotes[1].fetched) def test_full_id(self): - self.assertEqual(self.main_class.remotes[0].full_id(), "-file://repo1.git--") + self.assertEqual(self.main_class.remotes[0].full_id(), "-file://repo1.git---gitfs-master--") def test_full_id_with_name(self): self.assertEqual( - self.main_class.remotes[1].full_id(), "repo2-file://repo2.git--" + self.main_class.remotes[1].full_id(), "repo2-file://repo2.git---gitfs-master--" ) def test_get_cachedir_basename(self): self.assertEqual( self.main_class.remotes[0].get_cachedir_basename(), - "-tNy9UbCHQuwj6vltGS0ptBfsE36nygwN4lFc+vbiaGA=", + "-jXhnbGDemchtZwTwaD2s6VOaVvs98a7w+AtiYlmOVb0=", ) def test_get_cachedir_base_with_name(self): 
self.assertEqual( self.main_class.remotes[1].get_cachedir_basename(), - "repo2-QhjC+OMDxuokzFQdh0jlI9W0Q8MFAXCkOhoAviU7Vqo=", + "repo2-nuezpiDtjQRFC0ZJDByvi+F6Vb8ZhfoH41n_KFxTGsU=", ) def test_git_provider_mp_lock(self): From cd0f5a2045839b1c9b45aacddecf3eed66b63d14 Mon Sep 17 00:00:00 2001 From: cmcmarrow Date: Wed, 2 Aug 2023 14:36:42 -0500 Subject: [PATCH 37/73] run pre --- salt/utils/gitfs.py | 2 +- tests/unit/utils/test_gitfs.py | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 015d93f36da..d7774bacd3b 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -473,7 +473,7 @@ class GitProvider: self.role, getattr(self, "base", ""), getattr(self, "branch", ""), - target + target, ] ) # We loaded this data from yaml configuration files, so, its safe diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py index 563599a5d56..6d8e97a239e 100644 --- a/tests/unit/utils/test_gitfs.py +++ b/tests/unit/utils/test_gitfs.py @@ -115,11 +115,14 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): self.assertFalse(self.main_class.remotes[1].fetched) def test_full_id(self): - self.assertEqual(self.main_class.remotes[0].full_id(), "-file://repo1.git---gitfs-master--") + self.assertEqual( + self.main_class.remotes[0].full_id(), "-file://repo1.git---gitfs-master--" + ) def test_full_id_with_name(self): self.assertEqual( - self.main_class.remotes[1].full_id(), "repo2-file://repo2.git---gitfs-master--" + self.main_class.remotes[1].full_id(), + "repo2-file://repo2.git---gitfs-master--", ) def test_get_cachedir_basename(self): From a5ba5b5c5c020407c29fffed69c0593acd2caa7f Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 4 Aug 2023 10:29:47 -0400 Subject: [PATCH 38/73] Update relenv to `0.13.3` --- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/nightly.yml | 12 ++++++------ .github/workflows/scheduled.yml | 12 ++++++------ .github/workflows/staging.yml | 12 
++++++------ cicd/shared-gh-workflows-context.yml | 2 +- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f36a04cf082..ed1da2442e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -442,7 +442,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-salt-onedir: @@ -458,7 +458,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-rpm-pkgs: @@ -470,7 +470,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-deb-pkgs: @@ -482,7 +482,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-windows-pkgs: @@ -494,7 +494,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-macos-pkgs: @@ -506,7 +506,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: 
"3.10.12" amazonlinux-2-pkg-tests: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e5971fbbc81..e5944ed5b9f 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -491,7 +491,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-salt-onedir: @@ -507,7 +507,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-rpm-pkgs: @@ -519,7 +519,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-deb-pkgs: @@ -531,7 +531,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-windows-pkgs: @@ -543,7 +543,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" environment: nightly sign-packages: false @@ -558,7 +558,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" environment: nightly 
sign-packages: true diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index e6dd4bcd94d..7dfa7db3274 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -476,7 +476,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-salt-onedir: @@ -492,7 +492,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-rpm-pkgs: @@ -504,7 +504,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-deb-pkgs: @@ -516,7 +516,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-windows-pkgs: @@ -528,7 +528,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-macos-pkgs: @@ -540,7 +540,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" amazonlinux-2-pkg-tests: diff --git 
a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 9666fd465f4..185d274db47 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -482,7 +482,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-salt-onedir: @@ -498,7 +498,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-rpm-pkgs: @@ -510,7 +510,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-deb-pkgs: @@ -522,7 +522,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" build-windows-pkgs: @@ -534,7 +534,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" environment: staging sign-packages: ${{ inputs.sign-windows-packages }} @@ -549,7 +549,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.2" + relenv-version: "0.13.3" python-version: "3.10.12" environment: staging sign-packages: true 
diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 846d5625320..da2158b216a 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,2 +1,2 @@ python_version: "3.10.12" -relenv_version: "0.13.2" +relenv_version: "0.13.3" From 3095f1cea27d74f61ea67d305ea06db6deeb77a1 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 7 Aug 2023 18:30:32 +0100 Subject: [PATCH 39/73] Allow not publishing to test pypi during staging Signed-off-by: Pedro Algarvio --- .github/workflows/staging.yml | 6 +++++- .github/workflows/templates/staging.yml.jinja | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 185d274db47..be2b1709e07 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -18,6 +18,10 @@ on: type: boolean default: false description: Sign Windows Packages + skip-test-pypi-publish: + type: boolean + default: false + description: Skip publishing the source package to Test PyPi(For example, CVE releases) skip-salt-test-suite: type: boolean default: false @@ -2686,7 +2690,7 @@ jobs: publish-pypi: name: Publish to PyPi(test) - if: ${{ github.event.repository.fork != true }} + if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index fdb1b5933f3..c55377b98cc 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -31,6 +31,10 @@ on: type: boolean default: false description: Sign Windows Packages + skip-test-pypi-publish: + type: boolean + default: false + description: Skip publishing the source package to Test PyPi(For example, CVE releases) skip-salt-test-suite: type: boolean default: false @@ -160,7 +164,7 @@ concurrency: 
publish-pypi: <%- do conclusion_needs.append('publish-pypi') %> name: Publish to PyPi(test) - if: ${{ github.event.repository.fork != true }} + if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts From b52bf14dbf71d066291cf54df81a86c0a1192110 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Fri, 28 Jul 2023 14:39:10 -0700 Subject: [PATCH 40/73] Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. --- salt/utils/rsax931.py | 6 ++++++ tests/unit/utils/test_rsax931.py | 27 +++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/salt/utils/rsax931.py b/salt/utils/rsax931.py index 0eebf999446..535e7f6d3e6 100644 --- a/salt/utils/rsax931.py +++ b/salt/utils/rsax931.py @@ -37,6 +37,12 @@ def _find_libcrypto(): # system. # look in salts pkg install location. lib = glob.glob("/opt/salt/lib/libcrypto.dylib") + + # look in location salt is running from + # this accounts for running from an unpacked + # onedir file + lib = lib or glob.glob(os.path.join("lib/libcrypto.dylib")) + # Find library symlinks in Homebrew locations. brew_prefix = os.getenv("HOMEBREW_PREFIX", "/usr/local") lib = lib or glob.glob( diff --git a/tests/unit/utils/test_rsax931.py b/tests/unit/utils/test_rsax931.py index 67946d4bf5b..7fed394dcee 100644 --- a/tests/unit/utils/test_rsax931.py +++ b/tests/unit/utils/test_rsax931.py @@ -276,3 +276,30 @@ class RSAX931Test(TestCase): or hasattr(lib, "OPENSSL_init_crypto") or hasattr(lib, "OPENSSL_no_config") ) + + @patch.object(salt.utils.platform, "is_darwin", lambda: True) + @patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")) + @patch.object(sys, "platform", "macosx") + def test_find_libcrypto_darwin_onedir(self): + """ + Test _find_libcrypto on a macOS + libcryptos and defaulting to the versioned system libraries. 
+ """ + available = [ + "/usr/lib/libcrypto.0.9.7.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libcrypto.35.dylib", + "/usr/lib/libcrypto.41.dylib", + "/usr/lib/libcrypto.42.dylib", + "/usr/lib/libcrypto.44.dylib", + "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", + "/opt/local/lib/libcrypto.dylib", + "lib/libcrypto.dylib", + ] + + def test_glob(pattern): + return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] + + with patch.object(glob, "glob", test_glob): + lib_path = _find_libcrypto() + self.assertEqual("lib/libcrypto.dylib", lib_path) From 9a5a8cb7e629274fe333b6decf9bd70a6a86b3c2 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Fri, 28 Jul 2023 14:41:10 -0700 Subject: [PATCH 41/73] Adding changelog. --- changelog/64877.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/64877.fixed.md diff --git a/changelog/64877.fixed.md b/changelog/64877.fixed.md new file mode 100644 index 00000000000..ccf4e556880 --- /dev/null +++ b/changelog/64877.fixed.md @@ -0,0 +1 @@ +Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. From e32e1f7fcd162623c4fe7e9bee52191e92d9e228 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Sun, 30 Jul 2023 13:30:28 -0700 Subject: [PATCH 42/73] removing unused os.path.join. moving all patch decorators into the test functions. 
--- salt/utils/rsax931.py | 2 +- tests/unit/utils/test_rsax931.py | 77 +++++++++++++++++--------------- 2 files changed, 42 insertions(+), 37 deletions(-) diff --git a/salt/utils/rsax931.py b/salt/utils/rsax931.py index 535e7f6d3e6..fb8a1cbbd74 100644 --- a/salt/utils/rsax931.py +++ b/salt/utils/rsax931.py @@ -41,7 +41,7 @@ def _find_libcrypto(): # look in location salt is running from # this accounts for running from an unpacked # onedir file - lib = lib or glob.glob(os.path.join("lib/libcrypto.dylib")) + lib = lib or glob.glob("lib/libcrypto.dylib") # Find library symlinks in Homebrew locations. brew_prefix = os.getenv("HOMEBREW_PREFIX", "/usr/local") diff --git a/tests/unit/utils/test_rsax931.py b/tests/unit/utils/test_rsax931.py index 7fed394dcee..cbd1ffbc8ed 100644 --- a/tests/unit/utils/test_rsax931.py +++ b/tests/unit/utils/test_rsax931.py @@ -169,21 +169,19 @@ class RSAX931Test(TestCase): fnmatch.fnmatch(lib_path, "/opt/freeware/lib/libcrypto.so*") ) - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("10.14.2", (), "")) - @patch.object(glob, "glob", lambda _: []) - @patch.object(sys, "platform", "macosx") def test_find_libcrypto_with_system_before_catalina(self): """ Test _find_libcrypto on a pre-Catalina macOS host by simulating not finding any other libcryptos and verifying that it defaults to system. 
""" - lib_path = _find_libcrypto() - self.assertEqual(lib_path, "/usr/lib/libcrypto.dylib") + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("10.14.2", (), "") + ), patch.object(glob, "glob", lambda _: []), patch.object( + sys, "platform", "macosx" + ): + lib_path = _find_libcrypto() + self.assertEqual(lib_path, "/usr/lib/libcrypto.dylib") - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")) - @patch.object(sys, "platform", "macosx") def test_find_libcrypto_darwin_catalina(self): """ Test _find_libcrypto on a macOS Catalina host where there are no custom @@ -202,13 +200,14 @@ class RSAX931Test(TestCase): def test_glob(pattern): return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] - with patch.object(glob, "glob", test_glob): + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("10.15.2", (), "") + ), patch.object(sys, "platform", "macosx"), patch.object( + glob, "glob", test_glob + ): lib_path = _find_libcrypto() self.assertEqual("/usr/lib/libcrypto.44.dylib", lib_path) - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("11.2.2", (), "")) - @patch.object(sys, "platform", "macosx") def test_find_libcrypto_darwin_bigsur_packaged(self): """ Test _find_libcrypto on a Darwin-like macOS host where there isn't a @@ -237,31 +236,36 @@ class RSAX931Test(TestCase): return test_glob - for package_manager, expected_lib in managed_paths.items(): - if package_manager == "brew": - env = {"HOMEBREW_PREFIX": "/test/homebrew/prefix"} - else: - env = {"HOMEBREW_PREFIX": ""} - with patch.object(os, "getenv", mock_getenv(env)): - with patch.object(glob, "glob", mock_glob(expected_lib)): + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("11.2.2", (), 
"") + ), patch.object(sys, "platform", "macosx"): + for package_manager, expected_lib in managed_paths.items(): + if package_manager == "brew": + env = {"HOMEBREW_PREFIX": "/test/homebrew/prefix"} + else: + env = {"HOMEBREW_PREFIX": ""} + with patch.object(os, "getenv", mock_getenv(env)): + with patch.object(glob, "glob", mock_glob(expected_lib)): + lib_path = _find_libcrypto() + + self.assertEqual(expected_lib, lib_path) + + # On Big Sur, there's nothing else to fall back on. + with patch.object(glob, "glob", lambda _: []): + with self.assertRaises(OSError): lib_path = _find_libcrypto() - self.assertEqual(expected_lib, lib_path) - - # On Big Sur, there's nothing else to fall back on. - with patch.object(glob, "glob", lambda _: []): - with self.assertRaises(OSError): - lib_path = _find_libcrypto() - - @patch.object(ctypes.util, "find_library", lambda a: None) - @patch.object(glob, "glob", lambda a: []) - @patch.object(sys, "platform", "unknown") - @patch.object(salt.utils.platform, "is_darwin", lambda: False) def test_find_libcrypto_unsupported(self): """ Ensure that _find_libcrypto works correctly on an unsupported host OS. 
""" - with self.assertRaises(OSError): + with patch.object(ctypes.util, "find_library", lambda a: None), patch.object( + glob, "glob", lambda a: [] + ), patch.object(sys, "platform", "unknown"), patch.object( + salt.utils.platform, "is_darwin", lambda: False + ), self.assertRaises( + OSError + ): _find_libcrypto() def test_load_libcrypto(self): @@ -277,9 +281,6 @@ class RSAX931Test(TestCase): or hasattr(lib, "OPENSSL_no_config") ) - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")) - @patch.object(sys, "platform", "macosx") def test_find_libcrypto_darwin_onedir(self): """ Test _find_libcrypto on a macOS @@ -300,6 +301,10 @@ class RSAX931Test(TestCase): def test_glob(pattern): return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] - with patch.object(glob, "glob", test_glob): + with patch.object(glob, "glob", test_glob), patch.object( + salt.utils.platform, "is_darwin", lambda: True + ), patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")), patch.object( + sys, "platform", "macosx" + ): lib_path = _find_libcrypto() self.assertEqual("lib/libcrypto.dylib", lib_path) From a8cf6515a09b00e07e1bb803696d18d59782f595 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Mon, 31 Jul 2023 10:47:00 -0700 Subject: [PATCH 43/73] moving test to pytest. 
--- tests/pytests/unit/utils/test_rsax931.py | 322 +++++++++++++++++++++++ tests/unit/utils/test_rsax931.py | 310 ---------------------- 2 files changed, 322 insertions(+), 310 deletions(-) create mode 100644 tests/pytests/unit/utils/test_rsax931.py delete mode 100644 tests/unit/utils/test_rsax931.py diff --git a/tests/pytests/unit/utils/test_rsax931.py b/tests/pytests/unit/utils/test_rsax931.py new file mode 100644 index 00000000000..75d176d7020 --- /dev/null +++ b/tests/pytests/unit/utils/test_rsax931.py @@ -0,0 +1,322 @@ +""" +Test the RSA ANSI X9.31 signer and verifier +""" + +import ctypes +import ctypes.util +import fnmatch +import glob +import os +import platform +import sys + +import pytest + +import salt.utils.platform + +# salt libs +from salt.utils.rsax931 import ( + RSAX931Signer, + RSAX931Verifier, + _find_libcrypto, + _load_libcrypto, +) +from tests.support.mock import patch + + +@pytest.fixture +def privkey_data(): + return ( + "-----BEGIN RSA PRIVATE KEY-----\n" + "MIIEpAIBAAKCAQEA75GR6ZTv5JOv90Vq8tKhKC7YQnhDIo2hM0HVziTEk5R4UQBW\n" + "a0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD4ZMsYqLzqjWMekLC8bjhxc+EuPo9\n" + "Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R4hOcMMZNZdi0xLtFoTfwU61UPfFX\n" + "14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttLP3sMXJvc3EvM0JiDVj4l1TWFUHHz\n" + "eFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k6ai4tVzwkTmV5PsriP1ju88Lo3MB\n" + "4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWHAQIDAQABAoIBAGOzBzBYZUWRGOgl\n" + "IY8QjTT12dY/ymC05GM6gMobjxuD7FZ5d32HDLu/QrknfS3kKlFPUQGDAbQhbbb0\n" + "zw6VL5NO9mfOPO2W/3FaG1sRgBQcerWonoSSSn8OJwVBHMFLG3a+U1Zh1UvPoiPK\n" + "S734swIM+zFpNYivGPvOm/muF/waFf8tF/47t1cwt/JGXYQnkG/P7z0vp47Irpsb\n" + "Yjw7vPe4BnbY6SppSxscW3KoV7GtJLFKIxAXbxsuJMF/rYe3O3w2VKJ1Sug1VDJl\n" + "/GytwAkSUer84WwP2b07Wn4c5pCnmLslMgXCLkENgi1NnJMhYVOnckxGDZk54hqP\n" + "9RbLnkkCgYEA/yKuWEvgdzYRYkqpzB0l9ka7Y00CV4Dha9Of6GjQi9i4VCJ/UFVr\n" + "UlhTo5y0ZzpcDAPcoZf5CFZsD90a/BpQ3YTtdln2MMCL/Kr3QFmetkmDrt+3wYnX\n" + "sKESfsa2nZdOATRpl1antpwyD4RzsAeOPwBiACj4fkq5iZJBSI0bxrMCgYEA8GFi\n" + 
"qAjgKh81/Uai6KWTOW2kX02LEMVRrnZLQ9VPPLGid4KZDDk1/dEfxjjkcyOxX1Ux\n" + "Klu4W8ZEdZyzPcJrfk7PdopfGOfrhWzkREK9C40H7ou/1jUecq/STPfSOmxh3Y+D\n" + "ifMNO6z4sQAHx8VaHaxVsJ7SGR/spr0pkZL+NXsCgYEA84rIgBKWB1W+TGRXJzdf\n" + "yHIGaCjXpm2pQMN3LmP3RrcuZWm0vBt94dHcrR5l+u/zc6iwEDTAjJvqdU4rdyEr\n" + "tfkwr7v6TNlQB3WvpWanIPyVzfVSNFX/ZWSsAgZvxYjr9ixw6vzWBXOeOb/Gqu7b\n" + "cvpLkjmJ0wxDhbXtyXKhZA8CgYBZyvcQb+hUs732M4mtQBSD0kohc5TsGdlOQ1AQ\n" + "McFcmbpnzDghkclyW8jzwdLMk9uxEeDAwuxWE/UEvhlSi6qdzxC+Zifp5NBc0fVe\n" + "7lMx2mfJGxj5CnSqQLVdHQHB4zSXkAGB6XHbBd0MOUeuvzDPfs2voVQ4IG3FR0oc\n" + "3/znuwKBgQChZGH3McQcxmLA28aUwOVbWssfXKdDCsiJO+PEXXlL0maO3SbnFn+Q\n" + "Tyf8oHI5cdP7AbwDSx9bUfRPjg9dKKmATBFr2bn216pjGxK0OjYOCntFTVr0psRB\n" + "CrKg52Qrq71/2l4V2NLQZU40Dr1bN9V+Ftd9L0pvpCAEAWpIbLXGDw==\n" + "-----END RSA PRIVATE KEY-----" + ) + + +@pytest.fixture +def pubkey_data(): + return ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA75GR6ZTv5JOv90Vq8tKh\n" + "KC7YQnhDIo2hM0HVziTEk5R4UQBWa0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD\n" + "4ZMsYqLzqjWMekLC8bjhxc+EuPo9Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R\n" + "4hOcMMZNZdi0xLtFoTfwU61UPfFX14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttL\n" + "P3sMXJvc3EvM0JiDVj4l1TWFUHHzeFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k\n" + "6ai4tVzwkTmV5PsriP1ju88Lo3MB4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWH\n" + "AQIDAQAB\n" + "-----END PUBLIC KEY-----" + ) + + +@pytest.fixture +def hello_world(): + return b"hello, world" + + +@pytest.fixture +def hello_world_sig(): + return ( + b"\x63\xa0\x70\xd2\xe4\xd4\x6b\x8a\xa2\x59\x27\x5f\x00\x69" + b"\x1e\x3c\x50\xed\x50\x13\x09\x80\xe3\x47\x4e\x14\xb5\x7c" + b"\x07\x26\x4e\x20\x74\xea\x0e\xf8\xda\xff\x1e\x57\x8c\x67" + b"\x76\x73\xaa\xea\x0f\x0a\xe7\xa2\xe3\x88\xfc\x09\x87\x36" + b"\x01\x3a\xb7\x4c\x40\xe0\xf4\x54\xc5\xf1\xaa\xb2\x1d\x7f" + b"\xb6\xd3\xa8\xdd\x28\x69\x8b\x88\xe4\x42\x1e\x48\x3e\x1f" + b"\xe2\x2b\x3c\x7c\x85\x11\xe9\x59\xd7\xf3\xc2\x21\xd3\x55" + b"\xcb\x9c\x3c\x93\xcc\x20\xdf\x64\x81\xd0\x0d\xbf\x8e\x8d" + 
b"\x47\xec\x1d\x9e\x27\xec\x12\xed\x8b\x5f\xd6\x1d\xec\x8d" + b"\x77\x5a\x58\x8a\x24\xb6\x0f\x12\xb7\x51\xef\x7d\x85\x0f" + b"\x49\x39\x02\x81\x15\x08\x70\xd6\xe0\x0b\x31\xff\x5f\xf9" + b"\xd1\x92\x38\x59\x8c\x22\x9c\xbb\xbf\xcf\x85\x34\xe2\x47" + b"\xf5\xe2\xaa\xb4\x62\x33\x3c\x13\x78\x33\x87\x08\x9e\xb5" + b"\xbc\x5d\xc1\xbf\x79\x7c\xfa\x5f\x06\x6a\x3b\x17\x40\x09" + b"\xb9\x09\xbf\x32\xc3\x00\xe2\xbc\x91\x77\x14\xa5\x23\xf5" + b"\xf5\xf1\x09\x12\x38\xda\x3b\x6a\x82\x81\x7b\x5e\x1c\xcb" + b"\xaa\x36\x9b\x08\x36\x03\x14\x96\xa3\x31\x39\x59\x16\x75" + b"\xc9\xb6\x66\x94\x1b\x97\xff\xc8\xa1\xe3\x21\x35\x23\x06" + b"\x4c\x9b\xf4\xee" + ) + + +def test_signer(privkey_data, pubkey_data, hello_world, hello_world_sig): + with pytest.raises(ValueError): + signer = RSAX931Signer("bogus key data") + with pytest.raises(ValueError): + signer = RSAX931Signer(pubkey_data) + + signer = RSAX931Signer(privkey_data) + with pytest.raises(ValueError): + signer.sign("x" * 255) # message too long + + sig = signer.sign(hello_world) + assert hello_world_sig == sig + + +def test_verifier(privkey_data, pubkey_data, hello_world, hello_world_sig): + with pytest.raises(ValueError): + verifier = RSAX931Verifier("bogus key data") + with pytest.raises(ValueError): + verifier = RSAX931Verifier(privkey_data) + + verifier = RSAX931Verifier(pubkey_data) + with pytest.raises(ValueError): + verifier.verify("") + with pytest.raises(ValueError): + verifier.verify(hello_world_sig + b"junk") + + msg = verifier.verify(hello_world_sig) + assert hello_world == msg + + +@pytest.mark.skip_unless_on_windows +def test_find_libcrypto_win32(): + """ + Test _find_libcrypto on Windows hosts. + """ + lib_path = _find_libcrypto() + assert "libcrypto" in lib_path + + +@pytest.mark.skip_unless_on_smartos +def test_find_libcrypto_smartos(): + """ + Test _find_libcrypto on a SmartOS host. 
+ """ + lib_path = _find_libcrypto() + assert fnmatch.fnmatch( + lib_path, os.path.join(os.path.dirname(sys.executable), "libcrypto*") + ) + + +@pytest.mark.skip_unless_on_sunos +def test_find_libcrypto_sunos(): + """ + Test _find_libcrypto on a Solaris-like host. + """ + lib_path = _find_libcrypto() + passed = False + for i in ("/opt/local/lib/libcrypto.so*", "/opt/tools/lib/libcrypto.so*"): + if fnmatch.fnmatch(lib_path, i): + passed = True + break + assert passed + + +@pytest.mark.skip_unless_on_aix +def test_find_libcrypto_aix(): + """ + Test _find_libcrypto on an IBM AIX host. + """ + lib_path = _find_libcrypto() + if os.path.isdir("/opt/salt/lib"): + assert fnmatch.fnmatch(lib_path, "/opt/salt/lib/libcrypto.so*") + else: + assert fnmatch.fnmatch(lib_path, "/opt/freeware/lib/libcrypto.so*") + + +def test_find_libcrypto_with_system_before_catalina(): + """ + Test _find_libcrypto on a pre-Catalina macOS host by simulating not + finding any other libcryptos and verifying that it defaults to system. + """ + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("10.14.2", (), "") + ), patch.object(glob, "glob", lambda _: []), patch.object( + sys, "platform", "macosx" + ): + lib_path = _find_libcrypto() + assert lib_path == "/usr/lib/libcrypto.dylib" + + +def test_find_libcrypto_darwin_catalina(): + """ + Test _find_libcrypto on a macOS Catalina host where there are no custom + libcryptos and defaulting to the versioned system libraries. 
+ """ + available = [ + "/usr/lib/libcrypto.0.9.7.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libcrypto.35.dylib", + "/usr/lib/libcrypto.41.dylib", + "/usr/lib/libcrypto.42.dylib", + "/usr/lib/libcrypto.44.dylib", + "/usr/lib/libcrypto.dylib", + ] + + def test_glob(pattern): + return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] + + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("10.15.2", (), "") + ), patch.object(sys, "platform", "macosx"), patch.object(glob, "glob", test_glob): + lib_path = _find_libcrypto() + assert "/usr/lib/libcrypto.44.dylib" == lib_path + + +def test_find_libcrypto_darwin_bigsur_packaged(): + """ + Test _find_libcrypto on a Darwin-like macOS host where there isn't a + lacation returned by ctypes.util.find_library() and the libcrypto + installation comes from a package manager (ports, brew, salt). + """ + managed_paths = { + "salt": "/opt/salt/lib/libcrypto.dylib", + "brew": "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", + "port": "/opt/local/lib/libcrypto.dylib", + } + + saved_getenv = os.getenv + + def mock_getenv(env): + def test_getenv(var, default=None): + return env.get(var, saved_getenv(var, default)) + + return test_getenv + + def mock_glob(expected_lib): + def test_glob(pattern): + if fnmatch.fnmatch(expected_lib, pattern): + return [expected_lib] + return [] + + return test_glob + + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("11.2.2", (), "") + ), patch.object(sys, "platform", "macosx"): + for package_manager, expected_lib in managed_paths.items(): + if package_manager == "brew": + env = {"HOMEBREW_PREFIX": "/test/homebrew/prefix"} + else: + env = {"HOMEBREW_PREFIX": ""} + with patch.object(os, "getenv", mock_getenv(env)): + with patch.object(glob, "glob", mock_glob(expected_lib)): + lib_path = _find_libcrypto() + + assert expected_lib == lib_path + + # On Big 
Sur, there's nothing else to fall back on. + with patch.object(glob, "glob", lambda _: []): + with pytest.raises(OSError): + lib_path = _find_libcrypto() + + +def test_find_libcrypto_unsupported(): + """ + Ensure that _find_libcrypto works correctly on an unsupported host OS. + """ + with patch.object(ctypes.util, "find_library", lambda a: None), patch.object( + glob, "glob", lambda a: [] + ), patch.object(sys, "platform", "unknown"), patch.object( + salt.utils.platform, "is_darwin", lambda: False + ), pytest.raises( + OSError + ): + _find_libcrypto() + + +def test_load_libcrypto(): + """ + Test _load_libcrypto generically. + """ + lib = _load_libcrypto() + assert isinstance(lib, ctypes.CDLL) + # Try to cover both pre and post OpenSSL 1.1. + assert ( + hasattr(lib, "OpenSSL_version_num") + or hasattr(lib, "OPENSSL_init_crypto") + or hasattr(lib, "OPENSSL_no_config") + ) + + +def test_find_libcrypto_darwin_onedir(): + """ + Test _find_libcrypto on a macOS + libcryptos and defaulting to the versioned system libraries. 
+ """ + available = [ + "/usr/lib/libcrypto.0.9.7.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libcrypto.35.dylib", + "/usr/lib/libcrypto.41.dylib", + "/usr/lib/libcrypto.42.dylib", + "/usr/lib/libcrypto.44.dylib", + "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", + "/opt/local/lib/libcrypto.dylib", + "lib/libcrypto.dylib", + ] + + def test_glob(pattern): + return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] + + with patch.object(glob, "glob", test_glob), patch.object( + salt.utils.platform, "is_darwin", lambda: True + ), patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")), patch.object( + sys, "platform", "macosx" + ): + lib_path = _find_libcrypto() + assert "lib/libcrypto.dylib" == lib_path diff --git a/tests/unit/utils/test_rsax931.py b/tests/unit/utils/test_rsax931.py deleted file mode 100644 index cbd1ffbc8ed..00000000000 --- a/tests/unit/utils/test_rsax931.py +++ /dev/null @@ -1,310 +0,0 @@ -""" -Test the RSA ANSI X9.31 signer and verifier -""" - -import ctypes -import ctypes.util -import fnmatch -import glob -import os -import platform -import sys - -import pytest - -import salt.utils.platform - -# salt libs -from salt.utils.rsax931 import ( - RSAX931Signer, - RSAX931Verifier, - _find_libcrypto, - _load_libcrypto, -) -from tests.support.mock import patch - -# salt testing libs -from tests.support.unit import TestCase - - -class RSAX931Test(TestCase): - - privkey_data = ( - "-----BEGIN RSA PRIVATE KEY-----\n" - "MIIEpAIBAAKCAQEA75GR6ZTv5JOv90Vq8tKhKC7YQnhDIo2hM0HVziTEk5R4UQBW\n" - "a0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD4ZMsYqLzqjWMekLC8bjhxc+EuPo9\n" - "Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R4hOcMMZNZdi0xLtFoTfwU61UPfFX\n" - "14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttLP3sMXJvc3EvM0JiDVj4l1TWFUHHz\n" - "eFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k6ai4tVzwkTmV5PsriP1ju88Lo3MB\n" - "4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWHAQIDAQABAoIBAGOzBzBYZUWRGOgl\n" - "IY8QjTT12dY/ymC05GM6gMobjxuD7FZ5d32HDLu/QrknfS3kKlFPUQGDAbQhbbb0\n" - 
"zw6VL5NO9mfOPO2W/3FaG1sRgBQcerWonoSSSn8OJwVBHMFLG3a+U1Zh1UvPoiPK\n" - "S734swIM+zFpNYivGPvOm/muF/waFf8tF/47t1cwt/JGXYQnkG/P7z0vp47Irpsb\n" - "Yjw7vPe4BnbY6SppSxscW3KoV7GtJLFKIxAXbxsuJMF/rYe3O3w2VKJ1Sug1VDJl\n" - "/GytwAkSUer84WwP2b07Wn4c5pCnmLslMgXCLkENgi1NnJMhYVOnckxGDZk54hqP\n" - "9RbLnkkCgYEA/yKuWEvgdzYRYkqpzB0l9ka7Y00CV4Dha9Of6GjQi9i4VCJ/UFVr\n" - "UlhTo5y0ZzpcDAPcoZf5CFZsD90a/BpQ3YTtdln2MMCL/Kr3QFmetkmDrt+3wYnX\n" - "sKESfsa2nZdOATRpl1antpwyD4RzsAeOPwBiACj4fkq5iZJBSI0bxrMCgYEA8GFi\n" - "qAjgKh81/Uai6KWTOW2kX02LEMVRrnZLQ9VPPLGid4KZDDk1/dEfxjjkcyOxX1Ux\n" - "Klu4W8ZEdZyzPcJrfk7PdopfGOfrhWzkREK9C40H7ou/1jUecq/STPfSOmxh3Y+D\n" - "ifMNO6z4sQAHx8VaHaxVsJ7SGR/spr0pkZL+NXsCgYEA84rIgBKWB1W+TGRXJzdf\n" - "yHIGaCjXpm2pQMN3LmP3RrcuZWm0vBt94dHcrR5l+u/zc6iwEDTAjJvqdU4rdyEr\n" - "tfkwr7v6TNlQB3WvpWanIPyVzfVSNFX/ZWSsAgZvxYjr9ixw6vzWBXOeOb/Gqu7b\n" - "cvpLkjmJ0wxDhbXtyXKhZA8CgYBZyvcQb+hUs732M4mtQBSD0kohc5TsGdlOQ1AQ\n" - "McFcmbpnzDghkclyW8jzwdLMk9uxEeDAwuxWE/UEvhlSi6qdzxC+Zifp5NBc0fVe\n" - "7lMx2mfJGxj5CnSqQLVdHQHB4zSXkAGB6XHbBd0MOUeuvzDPfs2voVQ4IG3FR0oc\n" - "3/znuwKBgQChZGH3McQcxmLA28aUwOVbWssfXKdDCsiJO+PEXXlL0maO3SbnFn+Q\n" - "Tyf8oHI5cdP7AbwDSx9bUfRPjg9dKKmATBFr2bn216pjGxK0OjYOCntFTVr0psRB\n" - "CrKg52Qrq71/2l4V2NLQZU40Dr1bN9V+Ftd9L0pvpCAEAWpIbLXGDw==\n" - "-----END RSA PRIVATE KEY-----" - ) - - pubkey_data = ( - "-----BEGIN PUBLIC KEY-----\n" - "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA75GR6ZTv5JOv90Vq8tKh\n" - "KC7YQnhDIo2hM0HVziTEk5R4UQBWa0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD\n" - "4ZMsYqLzqjWMekLC8bjhxc+EuPo9Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R\n" - "4hOcMMZNZdi0xLtFoTfwU61UPfFX14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttL\n" - "P3sMXJvc3EvM0JiDVj4l1TWFUHHzeFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k\n" - "6ai4tVzwkTmV5PsriP1ju88Lo3MB4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWH\n" - "AQIDAQAB\n" - "-----END PUBLIC KEY-----" - ) - - hello_world = b"hello, world" - - hello_world_sig = ( - b"\x63\xa0\x70\xd2\xe4\xd4\x6b\x8a\xa2\x59\x27\x5f\x00\x69" - 
b"\x1e\x3c\x50\xed\x50\x13\x09\x80\xe3\x47\x4e\x14\xb5\x7c" - b"\x07\x26\x4e\x20\x74\xea\x0e\xf8\xda\xff\x1e\x57\x8c\x67" - b"\x76\x73\xaa\xea\x0f\x0a\xe7\xa2\xe3\x88\xfc\x09\x87\x36" - b"\x01\x3a\xb7\x4c\x40\xe0\xf4\x54\xc5\xf1\xaa\xb2\x1d\x7f" - b"\xb6\xd3\xa8\xdd\x28\x69\x8b\x88\xe4\x42\x1e\x48\x3e\x1f" - b"\xe2\x2b\x3c\x7c\x85\x11\xe9\x59\xd7\xf3\xc2\x21\xd3\x55" - b"\xcb\x9c\x3c\x93\xcc\x20\xdf\x64\x81\xd0\x0d\xbf\x8e\x8d" - b"\x47\xec\x1d\x9e\x27\xec\x12\xed\x8b\x5f\xd6\x1d\xec\x8d" - b"\x77\x5a\x58\x8a\x24\xb6\x0f\x12\xb7\x51\xef\x7d\x85\x0f" - b"\x49\x39\x02\x81\x15\x08\x70\xd6\xe0\x0b\x31\xff\x5f\xf9" - b"\xd1\x92\x38\x59\x8c\x22\x9c\xbb\xbf\xcf\x85\x34\xe2\x47" - b"\xf5\xe2\xaa\xb4\x62\x33\x3c\x13\x78\x33\x87\x08\x9e\xb5" - b"\xbc\x5d\xc1\xbf\x79\x7c\xfa\x5f\x06\x6a\x3b\x17\x40\x09" - b"\xb9\x09\xbf\x32\xc3\x00\xe2\xbc\x91\x77\x14\xa5\x23\xf5" - b"\xf5\xf1\x09\x12\x38\xda\x3b\x6a\x82\x81\x7b\x5e\x1c\xcb" - b"\xaa\x36\x9b\x08\x36\x03\x14\x96\xa3\x31\x39\x59\x16\x75" - b"\xc9\xb6\x66\x94\x1b\x97\xff\xc8\xa1\xe3\x21\x35\x23\x06" - b"\x4c\x9b\xf4\xee" - ) - - def test_signer(self): - with self.assertRaises(ValueError): - signer = RSAX931Signer("bogus key data") - with self.assertRaises(ValueError): - signer = RSAX931Signer(RSAX931Test.pubkey_data) - - signer = RSAX931Signer(RSAX931Test.privkey_data) - with self.assertRaises(ValueError): - signer.sign("x" * 255) # message too long - - sig = signer.sign(RSAX931Test.hello_world) - self.assertEqual(RSAX931Test.hello_world_sig, sig) - - def test_verifier(self): - with self.assertRaises(ValueError): - verifier = RSAX931Verifier("bogus key data") - with self.assertRaises(ValueError): - verifier = RSAX931Verifier(RSAX931Test.privkey_data) - - verifier = RSAX931Verifier(RSAX931Test.pubkey_data) - with self.assertRaises(ValueError): - verifier.verify("") - with self.assertRaises(ValueError): - verifier.verify(RSAX931Test.hello_world_sig + b"junk") - - msg = verifier.verify(RSAX931Test.hello_world_sig) - 
self.assertEqual(RSAX931Test.hello_world, msg) - - @pytest.mark.skip_unless_on_windows - def test_find_libcrypto_win32(self): - """ - Test _find_libcrypto on Windows hosts. - """ - lib_path = _find_libcrypto() - self.assertIn("libcrypto", lib_path) - - @pytest.mark.skip_unless_on_smartos - def test_find_libcrypto_smartos(self): - """ - Test _find_libcrypto on a SmartOS host. - """ - lib_path = _find_libcrypto() - self.assertTrue( - fnmatch.fnmatch( - lib_path, os.path.join(os.path.dirname(sys.executable), "libcrypto*") - ) - ) - - @pytest.mark.skip_unless_on_sunos - def test_find_libcrypto_sunos(self): - """ - Test _find_libcrypto on a Solaris-like host. - """ - lib_path = _find_libcrypto() - passed = False - for i in ("/opt/local/lib/libcrypto.so*", "/opt/tools/lib/libcrypto.so*"): - if fnmatch.fnmatch(lib_path, i): - passed = True - break - self.assertTrue(passed) - - @pytest.mark.skip_unless_on_aix - def test_find_libcrypto_aix(self): - """ - Test _find_libcrypto on an IBM AIX host. - """ - lib_path = _find_libcrypto() - if os.path.isdir("/opt/salt/lib"): - self.assertTrue(fnmatch.fnmatch(lib_path, "/opt/salt/lib/libcrypto.so*")) - else: - self.assertTrue( - fnmatch.fnmatch(lib_path, "/opt/freeware/lib/libcrypto.so*") - ) - - def test_find_libcrypto_with_system_before_catalina(self): - """ - Test _find_libcrypto on a pre-Catalina macOS host by simulating not - finding any other libcryptos and verifying that it defaults to system. - """ - with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( - platform, "mac_ver", lambda: ("10.14.2", (), "") - ), patch.object(glob, "glob", lambda _: []), patch.object( - sys, "platform", "macosx" - ): - lib_path = _find_libcrypto() - self.assertEqual(lib_path, "/usr/lib/libcrypto.dylib") - - def test_find_libcrypto_darwin_catalina(self): - """ - Test _find_libcrypto on a macOS Catalina host where there are no custom - libcryptos and defaulting to the versioned system libraries. 
- """ - available = [ - "/usr/lib/libcrypto.0.9.7.dylib", - "/usr/lib/libcrypto.0.9.8.dylib", - "/usr/lib/libcrypto.35.dylib", - "/usr/lib/libcrypto.41.dylib", - "/usr/lib/libcrypto.42.dylib", - "/usr/lib/libcrypto.44.dylib", - "/usr/lib/libcrypto.dylib", - ] - - def test_glob(pattern): - return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] - - with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( - platform, "mac_ver", lambda: ("10.15.2", (), "") - ), patch.object(sys, "platform", "macosx"), patch.object( - glob, "glob", test_glob - ): - lib_path = _find_libcrypto() - self.assertEqual("/usr/lib/libcrypto.44.dylib", lib_path) - - def test_find_libcrypto_darwin_bigsur_packaged(self): - """ - Test _find_libcrypto on a Darwin-like macOS host where there isn't a - lacation returned by ctypes.util.find_library() and the libcrypto - installation comes from a package manager (ports, brew, salt). - """ - managed_paths = { - "salt": "/opt/salt/lib/libcrypto.dylib", - "brew": "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", - "port": "/opt/local/lib/libcrypto.dylib", - } - - saved_getenv = os.getenv - - def mock_getenv(env): - def test_getenv(var, default=None): - return env.get(var, saved_getenv(var, default)) - - return test_getenv - - def mock_glob(expected_lib): - def test_glob(pattern): - if fnmatch.fnmatch(expected_lib, pattern): - return [expected_lib] - return [] - - return test_glob - - with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( - platform, "mac_ver", lambda: ("11.2.2", (), "") - ), patch.object(sys, "platform", "macosx"): - for package_manager, expected_lib in managed_paths.items(): - if package_manager == "brew": - env = {"HOMEBREW_PREFIX": "/test/homebrew/prefix"} - else: - env = {"HOMEBREW_PREFIX": ""} - with patch.object(os, "getenv", mock_getenv(env)): - with patch.object(glob, "glob", mock_glob(expected_lib)): - lib_path = _find_libcrypto() - - 
self.assertEqual(expected_lib, lib_path) - - # On Big Sur, there's nothing else to fall back on. - with patch.object(glob, "glob", lambda _: []): - with self.assertRaises(OSError): - lib_path = _find_libcrypto() - - def test_find_libcrypto_unsupported(self): - """ - Ensure that _find_libcrypto works correctly on an unsupported host OS. - """ - with patch.object(ctypes.util, "find_library", lambda a: None), patch.object( - glob, "glob", lambda a: [] - ), patch.object(sys, "platform", "unknown"), patch.object( - salt.utils.platform, "is_darwin", lambda: False - ), self.assertRaises( - OSError - ): - _find_libcrypto() - - def test_load_libcrypto(self): - """ - Test _load_libcrypto generically. - """ - lib = _load_libcrypto() - self.assertTrue(isinstance(lib, ctypes.CDLL)) - # Try to cover both pre and post OpenSSL 1.1. - self.assertTrue( - hasattr(lib, "OpenSSL_version_num") - or hasattr(lib, "OPENSSL_init_crypto") - or hasattr(lib, "OPENSSL_no_config") - ) - - def test_find_libcrypto_darwin_onedir(self): - """ - Test _find_libcrypto on a macOS - libcryptos and defaulting to the versioned system libraries. - """ - available = [ - "/usr/lib/libcrypto.0.9.7.dylib", - "/usr/lib/libcrypto.0.9.8.dylib", - "/usr/lib/libcrypto.35.dylib", - "/usr/lib/libcrypto.41.dylib", - "/usr/lib/libcrypto.42.dylib", - "/usr/lib/libcrypto.44.dylib", - "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", - "/opt/local/lib/libcrypto.dylib", - "lib/libcrypto.dylib", - ] - - def test_glob(pattern): - return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] - - with patch.object(glob, "glob", test_glob), patch.object( - salt.utils.platform, "is_darwin", lambda: True - ), patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")), patch.object( - sys, "platform", "macosx" - ): - lib_path = _find_libcrypto() - self.assertEqual("lib/libcrypto.dylib", lib_path) From 39315498d02119455586addee166b7ce2fb67301 Mon Sep 17 00:00:00 2001 From: "Gareth J. 
Greenaway" Date: Mon, 31 Jul 2023 11:41:34 -0700 Subject: [PATCH 44/73] removing comment --- tests/pytests/unit/utils/test_rsax931.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/pytests/unit/utils/test_rsax931.py b/tests/pytests/unit/utils/test_rsax931.py index 75d176d7020..a1c81e653f8 100644 --- a/tests/pytests/unit/utils/test_rsax931.py +++ b/tests/pytests/unit/utils/test_rsax931.py @@ -13,8 +13,6 @@ import sys import pytest import salt.utils.platform - -# salt libs from salt.utils.rsax931 import ( RSAX931Signer, RSAX931Verifier, From 2628158336342390a4b69f090b0126a7ddf4298b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 9 Aug 2023 12:46:43 +0100 Subject: [PATCH 45/73] Remove extra character Signed-off-by: Pedro Algarvio --- .github/workflows/test-package-downloads-action-macos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml index 1351469f64f..862e1c5adf5 100644 --- a/.github/workflows/test-package-downloads-action-macos.yml +++ b/.github/workflows/test-package-downloads-action-macos.yml @@ -259,7 +259,7 @@ jobs: report: name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}t + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: From 8f750fa7ae115c48c16ca03ef3f16e4ba76f921c Mon Sep 17 00:00:00 2001 From: Salt Project Packaging Date: Wed, 9 Aug 2023 12:08:57 +0000 Subject: [PATCH 46/73] Release v3006.2 --- CHANGELOG.md | 35 +++++++++++ changelog/64336.security.md | 4 -- changelog/64595.security.md | 12 ---- changelog/64718.security.md | 1 - changelog/64719.security.md | 3 - 
changelog/64897.fixed.md | 2 - changelog/cve-2023-20897.security.md | 1 - changelog/cve-2023-20898.security.md | 1 - doc/man/salt-api.1 | 2 +- doc/man/salt-call.1 | 2 +- doc/man/salt-cloud.1 | 2 +- doc/man/salt-cp.1 | 2 +- doc/man/salt-key.1 | 2 +- doc/man/salt-master.1 | 2 +- doc/man/salt-minion.1 | 2 +- doc/man/salt-proxy.1 | 2 +- doc/man/salt-run.1 | 2 +- doc/man/salt-ssh.1 | 2 +- doc/man/salt-syndic.1 | 2 +- doc/man/salt.1 | 2 +- doc/man/salt.7 | 60 +++++++++++++++++-- doc/man/spm.1 | 2 +- doc/topics/releases/3006.2.md | 50 ++++++++++++++++ .../releases/templates/3006.2.md.template | 14 +++++ pkg/debian/changelog | 36 +++++++++++ pkg/rpm/salt.spec | 35 ++++++++++- 26 files changed, 236 insertions(+), 44 deletions(-) delete mode 100644 changelog/64336.security.md delete mode 100644 changelog/64595.security.md delete mode 100644 changelog/64718.security.md delete mode 100644 changelog/64719.security.md delete mode 100644 changelog/64897.fixed.md delete mode 100644 changelog/cve-2023-20897.security.md delete mode 100644 changelog/cve-2023-20898.security.md create mode 100644 doc/topics/releases/3006.2.md create mode 100644 doc/topics/releases/templates/3006.2.md.template diff --git a/CHANGELOG.md b/CHANGELOG.md index 1132b94882b..919ecdabe94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,41 @@ Versions are `MAJOR.PATCH`. # Changelog +## 3006.2 (2023-08-09) + + +### Fixed + +- In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + + +### Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
[#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + + ## 3006.1 (2023-05-05) diff --git a/changelog/64336.security.md b/changelog/64336.security.md deleted file mode 100644 index a7b1c186a1d..00000000000 --- a/changelog/64336.security.md +++ /dev/null @@ -1,4 +0,0 @@ -Upgrade to `requests==2.31.0` - -Due to: - * https://github.com/advisories/GHSA-j8r2-6x86-q33q diff --git a/changelog/64595.security.md b/changelog/64595.security.md deleted file mode 100644 index 0875e68e625..00000000000 --- a/changelog/64595.security.md +++ /dev/null @@ -1,12 +0,0 @@ -Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) - -This only really impacts pip installs of Salt and the windows onedir since the linux and 
macos onedir build every package dependency from source, not from pre-existing wheels. - -Also resolves the following cryptography advisories: - -Due to: - * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v - * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 - * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r - -There is no security upgrade available for Py3.5 diff --git a/changelog/64718.security.md b/changelog/64718.security.md deleted file mode 100644 index b40aef1ad85..00000000000 --- a/changelog/64718.security.md +++ /dev/null @@ -1 +0,0 @@ -Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 diff --git a/changelog/64719.security.md b/changelog/64719.security.md deleted file mode 100644 index 3476499d3d7..00000000000 --- a/changelog/64719.security.md +++ /dev/null @@ -1,3 +0,0 @@ -Upgrade `relenv` to `0.13.2` and Python to `3.10.12` - -Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 diff --git a/changelog/64897.fixed.md b/changelog/64897.fixed.md deleted file mode 100644 index 6914040120c..00000000000 --- a/changelog/64897.fixed.md +++ /dev/null @@ -1,2 +0,0 @@ -In scenarios where PythonNet fails to load, Salt will now fall back to WMI for -gathering grains information diff --git a/changelog/cve-2023-20897.security.md b/changelog/cve-2023-20897.security.md deleted file mode 100644 index 4b6171e5d69..00000000000 --- a/changelog/cve-2023-20897.security.md +++ /dev/null @@ -1 +0,0 @@ -fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection diff --git a/changelog/cve-2023-20898.security.md b/changelog/cve-2023-20898.security.md deleted file mode 100644 index 44f1729192d..00000000000 --- a/changelog/cve-2023-20898.security.md +++ /dev/null @@ -1 +0,0 @@ -Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
diff --git a/doc/man/salt-api.1 b/doc/man/salt-api.1 index 379d345536a..745a68724f9 100644 --- a/doc/man/salt-api.1 +++ b/doc/man/salt-api.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-API" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-API" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-api \- salt-api Command .sp diff --git a/doc/man/salt-call.1 b/doc/man/salt-call.1 index 460cf91dddb..66814a40c1e 100644 --- a/doc/man/salt-call.1 +++ b/doc/man/salt-call.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CALL" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-CALL" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-call \- salt-call Documentation .SH SYNOPSIS diff --git a/doc/man/salt-cloud.1 b/doc/man/salt-cloud.1 index 2a75e218e04..efec023c258 100644 --- a/doc/man/salt-cloud.1 +++ b/doc/man/salt-cloud.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CLOUD" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-CLOUD" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-cloud \- Salt Cloud Command .sp diff --git a/doc/man/salt-cp.1 b/doc/man/salt-cp.1 index 74ab95a2bcc..b69ecddd447 100644 --- a/doc/man/salt-cp.1 +++ b/doc/man/salt-cp.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "SALT-CP" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-CP" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-cp \- salt-cp Documentation .sp diff --git a/doc/man/salt-key.1 b/doc/man/salt-key.1 index c4723cae0e0..9aac8c994ac 100644 --- a/doc/man/salt-key.1 +++ b/doc/man/salt-key.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-KEY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-KEY" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-key \- salt-key Documentation .SH SYNOPSIS diff --git a/doc/man/salt-master.1 b/doc/man/salt-master.1 index 72fa39ba91d..0e1e4d87071 100644 --- a/doc/man/salt-master.1 +++ b/doc/man/salt-master.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MASTER" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-MASTER" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-master \- salt-master Documentation .sp diff --git a/doc/man/salt-minion.1 b/doc/man/salt-minion.1 index fc550d0085f..886f8689a57 100644 --- a/doc/man/salt-minion.1 +++ b/doc/man/salt-minion.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MINION" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-MINION" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." 
"3006.2" "Salt" .SH NAME salt-minion \- salt-minion Documentation .sp diff --git a/doc/man/salt-proxy.1 b/doc/man/salt-proxy.1 index 9a78879db3a..708651dafc0 100644 --- a/doc/man/salt-proxy.1 +++ b/doc/man/salt-proxy.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-PROXY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-PROXY" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-proxy \- salt-proxy Documentation .sp diff --git a/doc/man/salt-run.1 b/doc/man/salt-run.1 index d4fbc53dc98..22c2037558e 100644 --- a/doc/man/salt-run.1 +++ b/doc/man/salt-run.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-RUN" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-RUN" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-run \- salt-run Documentation .sp diff --git a/doc/man/salt-ssh.1 b/doc/man/salt-ssh.1 index 3519bb75e1f..23c95c73061 100644 --- a/doc/man/salt-ssh.1 +++ b/doc/man/salt-ssh.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SSH" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-SSH" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." 
"3006.2" "Salt" .SH NAME salt-ssh \- salt-ssh Documentation .SH SYNOPSIS diff --git a/doc/man/salt-syndic.1 b/doc/man/salt-syndic.1 index 3b50a769071..506558960cc 100644 --- a/doc/man/salt-syndic.1 +++ b/doc/man/salt-syndic.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SYNDIC" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT-SYNDIC" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt-syndic \- salt-syndic Documentation .sp diff --git a/doc/man/salt.1 b/doc/man/salt.1 index 1c6873a02e1..f5c641a31ac 100644 --- a/doc/man/salt.1 +++ b/doc/man/salt.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME salt \- salt .SH SYNOPSIS diff --git a/doc/man/salt.7 b/doc/man/salt.7 index d50a2d55401..0723746f129 100644 --- a/doc/man/salt.7 +++ b/doc/man/salt.7 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "7" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SALT" "7" "Generated on August 09, 2023 at 12:02:24 PM UTC." 
"3006.2" "Salt" .SH NAME salt \- Salt Documentation .SH SALT PROJECT @@ -193937,7 +193937,7 @@ Passes through all the parameters described in the \fI\%utils.http.query function\fP: .INDENT 7.0 .TP -.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.1\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) +.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.2\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) Query a resource, and decode the 
return data .UNINDENT .INDENT 7.0 @@ -380698,12 +380698,12 @@ It also use C bindings if they are available. .INDENT 0.0 .TP .B salt.serializers.yaml.BaseDumper -alias of \fBSafeDumper\fP +alias of \fBCSafeDumper\fP .UNINDENT .INDENT 0.0 .TP .B salt.serializers.yaml.BaseLoader -alias of \fBSafeLoader\fP +alias of \fBCSafeLoader\fP .UNINDENT .INDENT 0.0 .TP @@ -380976,7 +380976,7 @@ alias of \fBSafeDumper\fP .INDENT 0.0 .TP .B salt.serializers.yamlex.BaseLoader -alias of \fBSafeLoader\fP +alias of \fBCSafeLoader\fP .UNINDENT .INDENT 0.0 .TP @@ -457275,7 +457275,7 @@ installed2 .UNINDENT .INDENT 0.0 .TP -.B salt.states.zcbuildout.installed(name, config=\(aqbuildout.cfg\(aq, quiet=False, parts=None, user=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=\(aq/opt/actions\-runner/_work/salt/salt/.tools\-venvs/docs/bin/python\(aq, debug=False, verbose=False, unless=None, onlyif=None, use_vt=False, loglevel=\(aqdebug\(aq, **kwargs) +.B salt.states.zcbuildout.installed(name, config=\(aqbuildout.cfg\(aq, quiet=False, parts=None, user=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=\(aq/opt/actions\-runner/_work/salt\-priv/salt\-priv/.tools\-venvs/docs/bin/python\(aq, debug=False, verbose=False, unless=None, onlyif=None, use_vt=False, loglevel=\(aqdebug\(aq, **kwargs) Install buildout in a specific directory .sp It is a thin wrapper to modules.buildout.buildout @@ -476693,6 +476693,54 @@ Cloud deployment directories are owned by salt user and group \fI\%#64204\fP .IP \(bu 2 \fBlgpo_reg\fP state now enforces and reports changes to the registry \fI\%#64222\fP .UNINDENT +(release\-3006.2)= +.SS Salt 3006.2 release notes +.SS Changelog +.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +In scenarios where PythonNet fails to load, Salt will now fall back to WMI for +gathering grains information \fI\%#64897\fP +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 
+fix CVE\-2023\-20897 by catching exception instead of letting exception disrupt connection \fI\%#cve\-2023\-20897\fP +.IP \(bu 2 +Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. \fI\%#cve\-2023\-20898\fP +.IP \(bu 2 +Upgrade to \fBrequests==2.31.0\fP +.sp +Due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-j8r2\-6x86\-q33q\fP \fI\%#64336\fP +.UNINDENT +.IP \(bu 2 +Upgrade to \fBcryptography==41.0.3\fP(and therefor \fBpyopenssl==23.2.0\fP due to \fI\%https://github.com/advisories/GHSA\-jm77\-qphf\-c4w8\fP) +.sp +This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre\-existing wheels. +.sp +Also resolves the following cryptography advisories: +.sp +Due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-5cpq\-8wj7\-hf2v\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-x4qr\-2fvf\-3mr5\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-w7pp\-m8wf\-vj6r\fP +.UNINDENT +.sp +There is no security upgrade available for Py3.5 \fI\%#64595\fP +.IP \(bu 2 +Bump to \fBcertifi==2023.07.22\fP due to \fI\%https://github.com/advisories/GHSA\-xqr8\-7jwr\-rhp7\fP \fI\%#64718\fP +.IP \(bu 2 +Upgrade \fBrelenv\fP to \fB0.13.2\fP and Python to \fB3.10.12\fP +.sp +Addresses multiple CVEs in Python\(aqs dependencies: \fI\%https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python\-3\-10\-12\fP \fI\%#64719\fP +.UNINDENT .sp See \fI\%Install a release candidate\fP for more information about installing an RC when one is available. diff --git a/doc/man/spm.1 b/doc/man/spm.1 index 90cc6e3d2d7..92380dc8d5e 100644 --- a/doc/man/spm.1 +++ b/doc/man/spm.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "SPM" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" +.TH "SPM" "1" "Generated on August 09, 2023 at 12:02:24 PM UTC." "3006.2" "Salt" .SH NAME spm \- Salt Package Manager Command .sp diff --git a/doc/topics/releases/3006.2.md b/doc/topics/releases/3006.2.md new file mode 100644 index 00000000000..7e607729835 --- /dev/null +++ b/doc/topics/releases/3006.2.md @@ -0,0 +1,50 @@ +(release-3006.2)= +# Salt 3006.2 release notes + + + + + + + +## Changelog + +### Fixed + +- In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + + +### Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. [#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. 
+ + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) diff --git a/doc/topics/releases/templates/3006.2.md.template b/doc/topics/releases/templates/3006.2.md.template new file mode 100644 index 00000000000..959ae89599b --- /dev/null +++ b/doc/topics/releases/templates/3006.2.md.template @@ -0,0 +1,14 @@ +(release-3006.2)= +# Salt 3006.2 release notes{{ unreleased }} +{{ warning }} + + + + +## Changelog +{{ changelog }} diff --git a/pkg/debian/changelog b/pkg/debian/changelog index 035085104b3..921752cfca3 100644 --- a/pkg/debian/changelog +++ b/pkg/debian/changelog @@ -1,3 +1,39 @@ +salt (3006.2) stable; urgency=medium + + + # Fixed + + * In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + + # Security + + * fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) + * Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
[#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) + * Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA*j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) + * Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre*existing wheels. + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA*5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA*x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA*w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) + * Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) + * Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python*3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + + + -- Salt Project Packaging Wed, 09 Aug 2023 12:01:52 +0000 + salt (3006.1) stable; urgency=medium diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 50b8e9c9783..f3cae281605 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -25,7 +25,7 @@ %define fish_dir %{_datadir}/fish/vendor_functions.d Name: salt -Version: 3006.1 +Version: 3006.2 Release: 0 Summary: A parallel remote execution system Group: System Environment/Daemons @@ -512,6 +512,39 @@ fi %changelog +* Wed Aug 09 2023 Salt Project Packaging - 3006.2 + +# Fixed + +- In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + +# 
Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. [#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + + * Fri May 05 2023 Salt Project Packaging - 3006.1 # Fixed From 0fda64866301c2751ea29d22877663a89a085d0a Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 9 Aug 2023 14:12:15 -0600 Subject: [PATCH 47/73] Fix SELinux get policy with trailing whitespace --- changelog/63336.fixed.md | 1 + salt/modules/selinux.py | 4 +++- tests/pytests/unit/modules/test_selinux.py | 6 +++--- 3 files 
changed, 7 insertions(+), 4 deletions(-) create mode 100644 changelog/63336.fixed.md diff --git a/changelog/63336.fixed.md b/changelog/63336.fixed.md new file mode 100644 index 00000000000..0223a4b0d65 --- /dev/null +++ b/changelog/63336.fixed.md @@ -0,0 +1 @@ +Fix SELinux get policy with trailing whitespace diff --git a/salt/modules/selinux.py b/salt/modules/selinux.py index dbfa93928bf..6b02900db19 100644 --- a/salt/modules/selinux.py +++ b/salt/modules/selinux.py @@ -477,8 +477,10 @@ def fcontext_get_policy( if filetype: _validate_filetype(filetype) re_spacer = "[ ]+" + re_optional_spacer = "[ |\t]*" cmd_kwargs = { "spacer": re_spacer, + "ospacer": re_optional_spacer, "filespec": re.escape(name), "sel_user": sel_user or "[^:]+", "sel_role": "[^:]+", # se_role for file context is always object_r @@ -490,7 +492,7 @@ def fcontext_get_policy( ) cmd = ( "semanage fcontext -l | egrep " - + "'^{filespec}{spacer}{filetype}{spacer}{sel_user}:{sel_role}:{sel_type}:{sel_level}$'".format( + + "'^{filespec}{spacer}{filetype}{spacer}{sel_user}:{sel_role}:{sel_type}:{sel_level}{ospacer}$'".format( **cmd_kwargs ) ) diff --git a/tests/pytests/unit/modules/test_selinux.py b/tests/pytests/unit/modules/test_selinux.py index dd38b6721f5..05d3ca25e24 100644 --- a/tests/pytests/unit/modules/test_selinux.py +++ b/tests/pytests/unit/modules/test_selinux.py @@ -19,7 +19,7 @@ def test_fcontext_get_policy_parsing(): { "semanage_out": ( "/var/www(/.*)? all files " - " system_u:object_r:httpd_sys_content_t:s0" + " system_u:object_r:httpd_sys_content_t:s0 " ), "name": "/var/www(/.*)?", "filetype": "all files", @@ -31,7 +31,7 @@ def test_fcontext_get_policy_parsing(): { "semanage_out": ( "/var/www(/.*)? all files " - " system_u:object_r:httpd_sys_content_t:s0" + " system_u:object_r:httpd_sys_content_t:s0 " ), "name": "/var/www(/.*)?", "filetype": "all files", @@ -43,7 +43,7 @@ def test_fcontext_get_policy_parsing(): { "semanage_out": ( "/var/lib/dhcp3? 
directory " - " system_u:object_r:dhcp_state_t:s0" + " system_u:object_r:dhcp_state_t:s0 " ), "name": "/var/lib/dhcp3?", "filetype": "directory", From a7903ad47338bb91aef242d01e44191158faae9f Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Thu, 10 Aug 2023 18:20:09 -0700 Subject: [PATCH 48/73] Update win_network.py Fixing lint --- salt/utils/win_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/salt/utils/win_network.py b/salt/utils/win_network.py index f7156dd40b8..eeae8fc091b 100644 --- a/salt/utils/win_network.py +++ b/salt/utils/win_network.py @@ -59,6 +59,7 @@ if IS_WINDOWS: try: # pylint: disable=unused-import import clr + # pylint: enable=unused-import from System.Net import NetworkInformation except RuntimeError: From 33310ac37d638f159307b72f7923ca1521c6dc05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Antonio=20Ram=C3=B3n=20S=C3=A1nchez=20Morales?= Date: Fri, 30 Jun 2023 04:00:15 +0200 Subject: [PATCH 49/73] fix(test_mac_brew_pkg): adapt test to also mock path.which MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Antonio Ramón Sánchez Morales (cherry picked from commit 4ac4da9aa477d41fae5cbe1386205294de815d11) --- tests/pytests/unit/modules/test_mac_brew_pkg.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/modules/test_mac_brew_pkg.py b/tests/pytests/unit/modules/test_mac_brew_pkg.py index f4ad86a66d2..749c9765cce 100644 --- a/tests/pytests/unit/modules/test_mac_brew_pkg.py +++ b/tests/pytests/unit/modules/test_mac_brew_pkg.py @@ -440,8 +440,9 @@ def test_homebrew_bin(HOMEBREW_BIN): Tests the path to the homebrew binary """ mock_path = MagicMock(return_value="/usr/local") - with patch.dict(mac_brew.__salt__, {"cmd.run": mock_path}): - assert mac_brew._homebrew_bin() == HOMEBREW_BIN + with patch("salt.utils.path.which", MagicMock(return_value=HOMEBREW_BIN)): + with patch.dict(mac_brew.__salt__, {"cmd.run": mock_path}): + assert 
mac_brew._homebrew_bin() == HOMEBREW_BIN # 'list_pkgs' function tests: 2 From bd17823792a0b1a30456bfcb41ded4c64e139f59 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 4 Aug 2023 16:31:37 -0600 Subject: [PATCH 50/73] Add salt.ufw to salt-master install on Debian and Ubuntu --- pkg/common/salt-master.upstart.rhel6 | 15 --------------- pkg/common/salt-minion.upstart.rhel6 | 21 --------------------- pkg/common/salt-syndic.upstart.rhel6 | 14 -------------- pkg/debian/salt-master.install | 1 + 4 files changed, 1 insertion(+), 50 deletions(-) delete mode 100644 pkg/common/salt-master.upstart.rhel6 delete mode 100644 pkg/common/salt-minion.upstart.rhel6 delete mode 100644 pkg/common/salt-syndic.upstart.rhel6 diff --git a/pkg/common/salt-master.upstart.rhel6 b/pkg/common/salt-master.upstart.rhel6 deleted file mode 100644 index 412297ae45e..00000000000 --- a/pkg/common/salt-master.upstart.rhel6 +++ /dev/null @@ -1,15 +0,0 @@ -description "Salt Master" - -start on runlevel [2345] -stop on runlevel [!2345] -limit nofile 100000 100000 - -script - # Read configuration variable file if it is present - [ -f /etc/default/$UPSTART_JOB ] && . /etc/default/$UPSTART_JOB - - # Activate the virtualenv if defined - [ -f $SALT_USE_VIRTUALENV/bin/activate ] && . $SALT_USE_VIRTUALENV/bin/activate - - exec salt-master -end script diff --git a/pkg/common/salt-minion.upstart.rhel6 b/pkg/common/salt-minion.upstart.rhel6 deleted file mode 100644 index ef5d588f879..00000000000 --- a/pkg/common/salt-minion.upstart.rhel6 +++ /dev/null @@ -1,21 +0,0 @@ -description "Salt Minion" - -start on runlevel [2345] -stop on runlevel [!2345] - -# The respawn in the minion is known to cause problems -# because if the main minion process dies it has done -# so most likely for a good reason. Uncomment these -# two lines to enable respawn -#respawn -#respawn limit 10 5 - -script - # Read configuration variable file if it is present - [ -f /etc/default/$UPSTART_JOB ] && . 
/etc/default/$UPSTART_JOB - - # Activate the virtualenv if defined - [ -f $SALT_USE_VIRTUALENV/bin/activate ] && . $SALT_USE_VIRTUALENV/bin/activate - - exec salt-minion -end script diff --git a/pkg/common/salt-syndic.upstart.rhel6 b/pkg/common/salt-syndic.upstart.rhel6 deleted file mode 100644 index c9467156b0d..00000000000 --- a/pkg/common/salt-syndic.upstart.rhel6 +++ /dev/null @@ -1,14 +0,0 @@ -description "salt-syndic" - -start on runlevel [2345] -stop on runlevel [!2345] - -script - # Read configuration variable file if it is present - [ -f /etc/default/$UPSTART_JOB ] && . /etc/default/$UPSTART_JOB - - # Activate the virtualenv if defined - [ -f $SALT_USE_VIRTUALENV/bin/activate ] && . $SALT_USE_VIRTUALENV/bin/activate - - exec salt-syndic -end script diff --git a/pkg/debian/salt-master.install b/pkg/debian/salt-master.install index 3d665d5b164..aad95813286 100644 --- a/pkg/debian/salt-master.install +++ b/pkg/debian/salt-master.install @@ -1 +1,2 @@ pkg/common/salt-master.service /lib/systemd/system +pkg/common/salt.ufw /etc/ufw/applications.d/salt-master From f0743ec9a8c1580bba1efc95314b65873ebfc61b Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 4 Aug 2023 17:29:30 -0600 Subject: [PATCH 51/73] Added tests for ufw allow salt --- changelog/64572.fixed.md | 1 + pkg/debian/salt-master.dirs | 1 + .../integration/daemons/test_masterapi.py | 21 +++++++++++++++++++ 3 files changed, 23 insertions(+) create mode 100644 changelog/64572.fixed.md diff --git a/changelog/64572.fixed.md b/changelog/64572.fixed.md new file mode 100644 index 00000000000..5ff75f36508 --- /dev/null +++ b/changelog/64572.fixed.md @@ -0,0 +1 @@ +Added salt.ufw to salt-master install on Debian and Ubuntu diff --git a/pkg/debian/salt-master.dirs b/pkg/debian/salt-master.dirs index faa45743bbb..cffed208e63 100644 --- a/pkg/debian/salt-master.dirs +++ b/pkg/debian/salt-master.dirs @@ -1,4 +1,5 @@ /etc/salt/master.d +/etc/ufw/applications.d/salt-master 
/etc/salt/pki/master/minions /etc/salt/pki/master/minions_autosign /etc/salt/pki/master/minions_denied diff --git a/tests/pytests/integration/daemons/test_masterapi.py b/tests/pytests/integration/daemons/test_masterapi.py index da4c9698a32..d57eeeab092 100644 --- a/tests/pytests/integration/daemons/test_masterapi.py +++ b/tests/pytests/integration/daemons/test_masterapi.py @@ -5,6 +5,7 @@ Test autosigning minions based on grain values. import os import shutil import stat +import subprocess import pytest @@ -120,3 +121,23 @@ def test_autosign_grains_fail( ) # get minion to try to authenticate itself again assert salt_minion.id not in salt_key_cli.run("-l", "acc") assert salt_minion.id in salt_key_cli.run("-l", "un") + + +@pytest.mark.skip_unless_on_linux +@pytest.mark.slow_test +def test_ufw_allow(salt_master, grains): + if grains["os_family"] != "Debian": + pytest.skip("Only runs on Debian family.") + + expected_output = """Skipping adding existing rule +Skipping adding existing rule (v6) + """ + + proc = subprocess.Popen( + "ufw allow salt\n", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + out, err = proc.communicate() + out_strg = out.decode() + err_strg = err.decode() + assert out_strg == expected_output + assert err_strg != "ERROR: Could not find a profile matching 'salt'" From bbeffed4607cf6badd16f49ad667586d8dd68aa2 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 14 Aug 2023 13:00:41 -0400 Subject: [PATCH 52/73] Add `Breaks` and `Replaces` statements to `debian/control` --- pkg/debian/control | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkg/debian/control b/pkg/debian/control index c8303101859..6c813fc1a67 100644 --- a/pkg/debian/control +++ b/pkg/debian/control @@ -17,6 +17,7 @@ Vcs-Git: git://github.com/saltstack/salt.git Package: salt-common Architecture: amd64 arm64 Depends: ${misc:Depends} +Breaks: salt-minion (<= 3006.1) Suggests: ifupdown Recommends: lsb-release Description: shared libraries that salt requires 
for all packages @@ -42,8 +43,8 @@ Description: shared libraries that salt requires for all packages Package: salt-master Architecture: amd64 arm64 -Replaces: salt-common (<= 3005.1+ds-4) -Breaks: salt-common (<= 3005.1+ds-4) +Replaces: salt-common (<= 3006.1) +Breaks: salt-common (<= 3006.1) Depends: salt-common (= ${source:Version}), ${misc:Depends} Description: remote manager to administer servers via salt From c2676f4e805abf199a497660c6c80290eda93401 Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Mon, 14 Aug 2023 16:12:49 +0000 Subject: [PATCH 53/73] manage runner subset documentation fix. --- salt/runners/manage.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/salt/runners/manage.py b/salt/runners/manage.py index 9dc67ed7282..3a0a201a4ef 100644 --- a/salt/runners/manage.py +++ b/salt/runners/manage.py @@ -223,7 +223,7 @@ def list_state(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -253,7 +253,7 @@ def list_not_state(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -285,7 +285,7 @@ def present(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -308,7 +308,7 @@ def not_present(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. 
show_ip : False Also show the IP address each minion is connecting from. @@ -331,7 +331,7 @@ def joined(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -354,7 +354,7 @@ def not_joined(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -377,7 +377,7 @@ def allowed(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -400,7 +400,7 @@ def not_allowed(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -423,7 +423,7 @@ def alived(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -446,7 +446,7 @@ def not_alived(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -469,7 +469,7 @@ def reaped(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. 
show_ip : False Also show the IP address each minion is connecting from. @@ -492,7 +492,7 @@ def not_reaped(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. From c5fcaffc7a238e3c6c418120c57ed65b5c2b34f0 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 8 Aug 2023 21:57:58 -0700 Subject: [PATCH 54/73] Preserve master credentials on spawning platforms Prevent spawning platform minions from having to re-authenticate on every job when using multiprocessing=True --- changelog/64914.fixed.md | 2 + salt/metaproxy/deltaproxy.py | 10 ++-- salt/metaproxy/proxy.py | 10 ++-- salt/minion.py | 14 ++++-- .../pytests/integration/minion/test_reauth.py | 49 +++++++++++++++++++ 5 files changed, 74 insertions(+), 11 deletions(-) create mode 100644 changelog/64914.fixed.md create mode 100644 tests/pytests/integration/minion/test_reauth.py diff --git a/changelog/64914.fixed.md b/changelog/64914.fixed.md new file mode 100644 index 00000000000..8506ca832a0 --- /dev/null +++ b/changelog/64914.fixed.md @@ -0,0 +1,2 @@ +Preserve credentials on spawning platforms, minions no longer re-authenticate +with every job when using `multiprocessing=True`. diff --git a/salt/metaproxy/deltaproxy.py b/salt/metaproxy/deltaproxy.py index d866d6f4c1d..51e390fe23c 100644 --- a/salt/metaproxy/deltaproxy.py +++ b/salt/metaproxy/deltaproxy.py @@ -580,7 +580,7 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils): return {"proxy_minion": _proxy_minion, "proxy_opts": proxyopts} -def target(cls, minion_instance, opts, data, connected): +def target(cls, minion_instance, opts, data, connected, creds_map): """ Handle targeting of the minion. 
@@ -593,6 +593,8 @@ def target(cls, minion_instance, opts, data, connected): minion_instance.opts["id"], opts["id"], ) + if creds_map: + salt.crypt.AsyncAuth.creds_map = creds_map if not hasattr(minion_instance, "proc_dir"): uid = salt.utils.user.get_uid(user=opts.get("user", None)) @@ -1061,21 +1063,23 @@ def handle_decoded_payload(self, data): instance = self multiprocessing_enabled = self.opts.get("multiprocessing", True) name = "ProcessPayload(jid={})".format(data["jid"]) + creds_map = None if multiprocessing_enabled: if salt.utils.platform.spawning_platform(): # let python reconstruct the minion on the other side if we"re # running on spawning platforms instance = None + creds_map = salt.crypt.AsyncAuth.creds_map with default_signals(signal.SIGINT, signal.SIGTERM): process = SignalHandlingProcess( target=target, - args=(self, instance, self.opts, data, self.connected), + args=(self, instance, self.opts, data, self.connected, creds_map), name=name, ) else: process = threading.Thread( target=target, - args=(self, instance, self.opts, data, self.connected), + args=(self, instance, self.opts, data, self.connected, creds_map), name=name, ) diff --git a/salt/metaproxy/proxy.py b/salt/metaproxy/proxy.py index a399c15ef16..40e378173e1 100644 --- a/salt/metaproxy/proxy.py +++ b/salt/metaproxy/proxy.py @@ -309,13 +309,15 @@ def post_master_init(self, master): self.ready = True -def target(cls, minion_instance, opts, data, connected): +def target(cls, minion_instance, opts, data, connected, creds_map): """ Handle targeting of the minion. Calling _thread_multi_return or _thread_return depending on a single or multiple commands. 
""" + if creds_map: + salt.crypt.AsyncAuth.creds_map = creds_map if not minion_instance: minion_instance = cls(opts) minion_instance.connected = connected @@ -814,21 +816,23 @@ def handle_decoded_payload(self, data): instance = self multiprocessing_enabled = self.opts.get("multiprocessing", True) name = "ProcessPayload(jid={})".format(data["jid"]) + creds_map = None if multiprocessing_enabled: if salt.utils.platform.spawning_platform(): # let python reconstruct the minion on the other side if we're # running on windows instance = None + creds_map = salt.crypt.AsyncAuth.creds_map with default_signals(signal.SIGINT, signal.SIGTERM): process = SignalHandlingProcess( target=self._target, name=name, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), ) else: process = threading.Thread( target=self._target, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), name=name, ) diff --git a/salt/minion.py b/salt/minion.py index 2ee1387c6ec..d85f5341cba 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -1763,6 +1763,7 @@ class Minion(MinionBase): # python needs to be able to reconstruct the reference on the other # side. 
instance = self + creds_map = None multiprocessing_enabled = self.opts.get("multiprocessing", True) name = "ProcessPayload(jid={})".format(data["jid"]) if multiprocessing_enabled: @@ -1770,17 +1771,18 @@ class Minion(MinionBase): # let python reconstruct the minion on the other side if we're # running on windows instance = None + creds_map = salt.crypt.AsyncAuth.creds_map with default_signals(signal.SIGINT, signal.SIGTERM): process = SignalHandlingProcess( target=self._target, name=name, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), ) process.register_after_fork_method(salt.utils.crypt.reinit_crypto) else: process = threading.Thread( target=self._target, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), name=name, ) @@ -1804,7 +1806,9 @@ class Minion(MinionBase): return exitstack @classmethod - def _target(cls, minion_instance, opts, data, connected): + def _target(cls, minion_instance, opts, data, connected, creds_map): + if creds_map: + salt.crypt.AsyncAuth.creds_map = creds_map if not minion_instance: minion_instance = cls(opts, load_grains=False) minion_instance.connected = connected @@ -3879,10 +3883,10 @@ class ProxyMinion(Minion): return mp_call(self, data) @classmethod - def _target(cls, minion_instance, opts, data, connected): + def _target(cls, minion_instance, opts, data, connected, creds_map): mp_call = _metaproxy_call(opts, "target") - return mp_call(cls, minion_instance, opts, data, connected) + return mp_call(cls, minion_instance, opts, data, connected, creds_map) @classmethod def _thread_return(cls, minion_instance, opts, data): diff --git a/tests/pytests/integration/minion/test_reauth.py b/tests/pytests/integration/minion/test_reauth.py new file mode 100644 index 00000000000..cebc9f3820d --- /dev/null +++ b/tests/pytests/integration/minion/test_reauth.py @@ -0,0 +1,49 @@ +import time + + +def 
test_reauth(salt_master_factory, event_listener): + """ + Validate non of our platform need to re-authenticate when runing a job with + multiprocessing=True. + """ + sls_name = "issue-64941" + sls_contents = """ + custom_test_state: + test.configurable_test_state: + - name: example + - changes: True + - result: True + - comment: "Nothing has acutally been changed" + """ + events = [] + + def handler(data): + events.append(data) + + event_listener.register_auth_event_handler("test_reauth-master", handler) + master = salt_master_factory.salt_master_daemon( + "test_reauth-master", + overrides={"log_level": "info"}, + ) + sls_tempfile = master.state_tree.base.temp_file( + "{}.sls".format(sls_name), sls_contents + ) + minion = master.salt_minion_daemon( + "test_reauth-minion", + overrides={"log_level": "info"}, + ) + cli = master.salt_cli() + start_time = time.time() + with master.started(), minion.started(): + events = event_listener.get_events( + [(master.id, "salt/auth")], + after_time=start_time, + ) + num_auth = len(events) + proc = cli.run("state.sls", sls_name, minion_tgt="*") + assert proc.returncode == 1 + events = event_listener.get_events( + [(master.id, "salt/auth")], + after_time=start_time, + ) + assert num_auth == len(events) From 20f9b6a8af222addab938e310be2d0a4fc236911 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Fri, 11 Aug 2023 14:10:03 -0700 Subject: [PATCH 55/73] Give tests a better chance of passing --- tests/pytests/integration/cli/test_matcher.py | 2 +- tests/pytests/integration/minion/test_reauth.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/integration/cli/test_matcher.py b/tests/pytests/integration/cli/test_matcher.py index 6b3b21bbaa6..2e91eba7ad4 100644 --- a/tests/pytests/integration/cli/test_matcher.py +++ b/tests/pytests/integration/cli/test_matcher.py @@ -401,7 +401,7 @@ def test_grains_targeting_minion_id_disconnected(salt_master, salt_minion, salt_ "-G", "test.ping", minion_tgt="id:{}".format(disconnected_minion_id), - _timeout=15, + _timeout=30, ) assert ret.returncode == 1 assert disconnected_minion_id in ret.data diff --git a/tests/pytests/integration/minion/test_reauth.py b/tests/pytests/integration/minion/test_reauth.py index cebc9f3820d..14a1e873551 100644 --- a/tests/pytests/integration/minion/test_reauth.py +++ b/tests/pytests/integration/minion/test_reauth.py @@ -13,7 +13,7 @@ def test_reauth(salt_master_factory, event_listener): - name: example - changes: True - result: True - - comment: "Nothing has acutally been changed" + - comment: "Nothing has actually been changed" """ events = [] From 0136a0cd4bc5e551b7a8b6206148ca86ea76e91a Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 14 Aug 2023 18:32:09 +0100 Subject: [PATCH 56/73] Merge freeze(3005.2) into 3006.x --- CHANGELOG.md | 48 +++++++++++++++++++++++++++++++++- doc/topics/releases/3005.2.rst | 41 +++++++++++++++++++++++++++++ 2 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 doc/topics/releases/3005.2.rst diff --git a/CHANGELOG.md b/CHANGELOG.md index 919ecdabe94..8374cd0c98c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,6 +42,39 @@ Versions are `MAJOR.PATCH`. 
Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) +## Salt v3005.2 (2023-07-31) + +### Changed + +- Additional required package upgrades + + * It's now `pyzmq>=20.0.0` on all platforms, and `<=22.0.3` just for windows. + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. (#63757) + + +### Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection (cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. (cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q (#64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 (#64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 + + Python 3.5 cannot get the updated requirements since certifi no longer supports this python version (#64720) + + ## 3006.1 (2023-05-05) @@ -77,7 +110,6 @@ Versions are `MAJOR.PATCH`. - Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) - ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) - ## 3006.0 (2023-04-18) @@ -1460,6 +1492,20 @@ Versions are `MAJOR.PATCH`. 
- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +## Salt v3005.1-2 (2022-11-04) + +Note: This release is only impacting the packages not the Salt code base. + +### Fixed + +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang (#62937) +- Onedir Package Fix: Fix "No such file or directory" error on Rhel installs. (#62948) + +### Security + +- Update the onedir packages Python version to 3.8.15 for Windows and 3.9.15 for Linux and Mac + + ## Salt 3005.1 (2022-09-26) ### Fixed diff --git a/doc/topics/releases/3005.2.rst b/doc/topics/releases/3005.2.rst new file mode 100644 index 00000000000..d802408f9c8 --- /dev/null +++ b/doc/topics/releases/3005.2.rst @@ -0,0 +1,41 @@ +.. _release-3005-2: + +========================= +Salt 3005.2 Release Notes +========================= + +Version 3005.2 is a CVE security fix release for :ref:`3005 `. + + +Changed +------- + +- Additional required package upgrades + + * It's now `pyzmq>=20.0.0` on all platforms, and `<=22.0.3` just for windows. + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. (#63757) + + +Security +-------- + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection (cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
(cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q (#64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 (#64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 + + Python 3.5 cannot get the updated requirements since certifi no longer supports this python version (#64720) + From 90eb7695e321e51d3f2a4de05481271ec168b386 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 15 Aug 2023 09:39:20 +0100 Subject: [PATCH 57/73] Fix pre-commit issue Signed-off-by: Pedro Algarvio --- doc/topics/releases/3005.2.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/topics/releases/3005.2.rst b/doc/topics/releases/3005.2.rst index d802408f9c8..b267ee3978a 100644 --- a/doc/topics/releases/3005.2.rst +++ b/doc/topics/releases/3005.2.rst @@ -38,4 +38,3 @@ Security - Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 Python 3.5 cannot get the updated requirements since certifi no longer supports this python version (#64720) - From 4f8c2a428020e48d9d9172d62eb17fecbe9366ad Mon Sep 17 00:00:00 2001 From: twangboy Date: Mon, 14 Aug 2023 10:33:42 -0600 Subject: [PATCH 58/73] Restore current path, rather than removing from path --- .../functional/states/test_chocolatey_1_2_1.py | 17 +++++++++++++---- .../functional/states/test_chocolatey_latest.py | 17 +++++++++++++---- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/tests/pytests/functional/states/test_chocolatey_1_2_1.py b/tests/pytests/functional/states/test_chocolatey_1_2_1.py index bdb4e74ea88..9dcc186636a 
100644 --- a/tests/pytests/functional/states/test_chocolatey_1_2_1.py +++ b/tests/pytests/functional/states/test_chocolatey_1_2_1.py @@ -7,6 +7,7 @@ import pathlib import pytest import salt.utils.path +import salt.utils.win_reg pytestmark = [ pytest.mark.windows_whitelisted, @@ -24,6 +25,11 @@ def chocolatey(states): @pytest.fixture(scope="module") def chocolatey_mod(modules): + current_path = salt.utils.win_reg.read_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + )["vdata"] url = "https://packages.chocolatey.org/chocolatey.1.2.1.nupkg" with pytest.helpers.temp_file(name="choco.nupkg") as nupkg: choco_pkg = pathlib.Path(str(nupkg)) @@ -67,10 +73,13 @@ def chocolatey_mod(modules): modules.environ.setval( key=env_var, val=False, false_unsets=True, permanent="HKCU" ) - # Remove Chocolatey from the Path - for path in modules.win_path.get_path(): - if "chocolatey" in path.lower(): - modules.win_path.remove(path=path, rehash=True) + salt.utils.win_reg.set_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + vdata=current_path, + ) + modules.win_path.rehash() # Remove unknown version if salt.utils.path.which("choco.exe"): diff --git a/tests/pytests/functional/states/test_chocolatey_latest.py b/tests/pytests/functional/states/test_chocolatey_latest.py index d388b362f6c..9d329d5fc59 100644 --- a/tests/pytests/functional/states/test_chocolatey_latest.py +++ b/tests/pytests/functional/states/test_chocolatey_latest.py @@ -7,6 +7,7 @@ import pathlib import pytest import salt.utils.path +import salt.utils.win_reg pytestmark = [ pytest.mark.windows_whitelisted, @@ -24,6 +25,11 @@ def chocolatey(states): @pytest.fixture(scope="module") def chocolatey_mod(modules): + current_path = salt.utils.win_reg.read_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + )["vdata"] url = 
"https://community.chocolatey.org/api/v2/package/chocolatey/" with pytest.helpers.temp_file(name="choco.nupkg") as nupkg: choco_pkg = pathlib.Path(str(nupkg)) @@ -67,10 +73,13 @@ def chocolatey_mod(modules): modules.environ.setval( key=env_var, val=False, false_unsets=True, permanent="HKCU" ) - # Remove Chocolatey from the Path - for path in modules.win_path.get_path(): - if "chocolatey" in path.lower(): - modules.win_path.remove(path=path, rehash=True) + salt.utils.win_reg.set_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + vdata=current_path, + ) + modules.win_path.rehash() # Remove unknown version if salt.utils.path.which("choco.exe"): From 7e76dc9ea7c60a9a9f9bcb5e13c0fe206c8c92e2 Mon Sep 17 00:00:00 2001 From: twangboy Date: Mon, 14 Aug 2023 19:12:20 -0600 Subject: [PATCH 59/73] Skip to see if chocolatey tests are causing the failure --- tests/pytests/functional/states/test_chocolatey_1_2_1.py | 1 + tests/pytests/functional/states/test_chocolatey_latest.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/pytests/functional/states/test_chocolatey_1_2_1.py b/tests/pytests/functional/states/test_chocolatey_1_2_1.py index 9dcc186636a..04e1d3ece1a 100644 --- a/tests/pytests/functional/states/test_chocolatey_1_2_1.py +++ b/tests/pytests/functional/states/test_chocolatey_1_2_1.py @@ -14,6 +14,7 @@ pytestmark = [ pytest.mark.skip_unless_on_windows, pytest.mark.slow_test, pytest.mark.destructive_test, + pytest.mark.skip_on_windows, ] diff --git a/tests/pytests/functional/states/test_chocolatey_latest.py b/tests/pytests/functional/states/test_chocolatey_latest.py index 9d329d5fc59..6e2f55405d8 100644 --- a/tests/pytests/functional/states/test_chocolatey_latest.py +++ b/tests/pytests/functional/states/test_chocolatey_latest.py @@ -14,6 +14,7 @@ pytestmark = [ pytest.mark.skip_unless_on_windows, pytest.mark.slow_test, pytest.mark.destructive_test, + pytest.mark.skip_on_windows, ] From 
a2102e0ca3c7f263bc738a1c6d2551365773b0b7 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Thu, 10 Aug 2023 22:44:21 -0700 Subject: [PATCH 60/73] Preserve test condition durring sub-state runs --- salt/loader/lazy.py | 14 ++++- salt/state.py | 17 ++---- .../integration/states/test_x509_v2.py | 59 +++++++++++++++++++ 3 files changed, 77 insertions(+), 13 deletions(-) diff --git a/salt/loader/lazy.py b/salt/loader/lazy.py index d319fe54b42..c1bcace010f 100644 --- a/salt/loader/lazy.py +++ b/salt/loader/lazy.py @@ -144,9 +144,21 @@ class LoadedFunc: def __call__(self, *args, **kwargs): run_func = self.func + mod = sys.modules[run_func.__module__] + if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): + if mod.__opts__.value() is not None: + mod.__opts__.value()["test"] = self.loader.opts["test"] + else: + mod.__opts__["test"] = self.loader.opts["test"] if self.loader.inject_globals: run_func = global_injector_decorator(self.loader.inject_globals)(run_func) - return self.loader.run(run_func, *args, **kwargs) + ret = self.loader.run(run_func, *args, **kwargs) + if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): + if mod.__opts__.value() is not None: + self.loader.opts["test"] = mod.__opts__["test"] + else: + self.loader.opts["test"] = mod.__opts__["test"] + return ret def __repr__(self): return "<{} name={!r}>".format(self.__class__.__name__, self.name) diff --git a/salt/state.py b/salt/state.py index 868be2749e9..1774e07061d 100644 --- a/salt/state.py +++ b/salt/state.py @@ -11,7 +11,6 @@ The data sent to the state calls is as follows: } """ - import copy import datetime import fnmatch @@ -22,7 +21,6 @@ import os import random import re import site -import sys import time import traceback @@ -2141,9 +2139,7 @@ class State: if "retry" in low: retries = 1 low["retry"] = instance.verify_retry_data(low["retry"]) - if not sys.modules[instance.states[cdata["full"]].__module__].__opts__[ - "test" - ]: + if not 
instance.states.opts["test"]: while low["retry"]["attempts"] >= retries: if low["retry"]["until"] == ret["result"]: @@ -2322,8 +2318,8 @@ class State: inject_globals.update(self.inject_globals) if low.get("__prereq__"): - test = sys.modules[self.states[cdata["full"]].__module__].__opts__["test"] - sys.modules[self.states[cdata["full"]].__module__].__opts__["test"] = True + test = self.states.opts["test"] + self.states.opts["test"] = True try: # Let's get a reference to the salt environment to use within this # state call. @@ -2419,10 +2415,7 @@ class State: } finally: if low.get("__prereq__"): - sys.modules[self.states[cdata["full"]].__module__].__opts__[ - "test" - ] = test - + self.states.opts["test"] = test self.state_con.pop("runas", None) self.state_con.pop("runas_password", None) @@ -2463,7 +2456,7 @@ class State: ) if "retry" in low and "parallel" not in low: low["retry"] = self.verify_retry_data(low["retry"]) - if not sys.modules[self.states[cdata["full"]].__module__].__opts__["test"]: + if not self.states.opts["test"]: if low["retry"]["until"] != ret["result"]: if low["retry"]["attempts"] > retries: interval = low["retry"]["interval"] diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index 9a1c09bb8bd..5e82af0eac3 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -46,6 +46,65 @@ def x509_pkidir(tmp_path_factory): shutil.rmtree(str(_x509_pkidir), ignore_errors=True) +@pytest.fixture(params=[{}]) +def existing_privkey(x509_salt_call_cli, request, tmp_path): + pk_file = tmp_path / "priv.key" + pk_args = {"name": str(pk_file)} + pk_args.update(request.param) + ret = x509_salt_call_cli.run("state.single", "x509.private_key_managed", **pk_args) + assert ret.returncode == 0 + assert pk_file.exists() + yield pk_args["name"] + + +def test_file_managed_does_not_run_in_test_mode_after_x509_v2_invocation_without_changes( + 
x509_salt_master, x509_salt_call_cli, tmp_path, existing_privkey +): + """ + The x509_v2 state module tries to workaround issue #62590 (Test mode does + not propagate to __states__ when using prereq) by invoking the ``state.single`` + execution module with an explicit test parameter. In some cases, this seems + to trigger another bug: The file module always runs in test mode afterwards. + This seems to be the case when the x509_v2 state module does not report changes + after having been invoked at least once before, until another x509_v2 call results + in a ``file.managed`` call without test mode. + Issue #64195. + """ + new_privkey = tmp_path / "new_privkey" + new_file = tmp_path / "new_file" + assert not new_file.exists() + state = f"""\ + # The result of this call is irrelevant, just that it exists + Some private key is present: + x509.private_key_managed: + - name: {new_privkey} + # This single call without changes does not trigger the bug on its own + Another private key is (already) present: + x509.private_key_managed: + - name: {existing_privkey} + Subsequent file.managed call should not run in test mode: + file.managed: + - name: {new_file} + - contents: foo + - require: + - Another private key is (already) present + """ + with x509_salt_master.state_tree.base.temp_file("file_managed_test.sls", state): + ret = x509_salt_call_cli.run("state.apply", "file_managed_test") + assert ret.returncode == 0 + assert ret.data + x509_res = next(ret.data[x] for x in ret.data if x.startswith("x509_|-Another")) + assert x509_res["result"] is True + assert not x509_res["changes"] + file_res = next( + ret.data[x] for x in ret.data if x.startswith("file_|-Subsequent") + ) + assert file_res["result"] is True + assert file_res["changes"] + assert new_file.exists() + assert new_file.read_text() == "foo\n" + + @pytest.fixture(scope="module", autouse=True) def x509_data( x509_pkidir, From 48872effaa7f222374dae68ed0a5ed118028000d Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Fri, 11 Aug 2023 01:38:55 -0700 Subject: [PATCH 61/73] Add regression test for 62590 --- .../integration/states/test_state_test.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 tests/pytests/integration/states/test_state_test.py diff --git a/tests/pytests/integration/states/test_state_test.py b/tests/pytests/integration/states/test_state_test.py new file mode 100644 index 00000000000..6455d4da7c1 --- /dev/null +++ b/tests/pytests/integration/states/test_state_test.py @@ -0,0 +1,34 @@ +def test_issue_62590(salt_master, salt_minion, salt_cli): + + statepy = """ + # _states/test2.py + import logging + log = logging.getLogger(__name__) + + def call_another(name, m_name, **kwargs): + ret = __states__[m_name](name, **kwargs) + log.info(f'{__opts__["test"]}: {ret}') + return ret + """ + statesls = """ + run indirect: + test2.call_another: + - m_name: test.succeed_with_changes + + run prereq: + test2.call_another: + - m_name: test.succeed_with_changes + + nop: + test.nop: + - prereq: + - run prereq + """ + with salt_master.state_tree.base.temp_file( + "_states/test2.py", statepy + ), salt_master.state_tree.base.temp_file("test_62590.sls", statesls): + ret = salt_cli.run("saltutil.sync_all", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + ret = salt_cli.run("state.apply", "test_62590", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert "Success!" == ret.data["test_|-nop_|-nop_|-nop"]["comment"] From dcae0bfc1e46c419af1e35f04049d7c99bbb495b Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Fri, 11 Aug 2023 01:40:06 -0700 Subject: [PATCH 62/73] Add changelog for 62590 --- changelog/62590.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/62590.fixed.md diff --git a/changelog/62590.fixed.md b/changelog/62590.fixed.md new file mode 100644 index 00000000000..08b46d69651 --- /dev/null +++ b/changelog/62590.fixed.md @@ -0,0 +1 @@ +Preserve test=True condition while running sub states. From f08f374e973d0daf198b1a415979dcc95fb367d1 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Fri, 11 Aug 2023 02:39:20 -0700 Subject: [PATCH 63/73] Address review nits --- tests/pytests/integration/states/test_x509_v2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index 5e82af0eac3..be01852919b 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -73,7 +73,7 @@ def test_file_managed_does_not_run_in_test_mode_after_x509_v2_invocation_without new_privkey = tmp_path / "new_privkey" new_file = tmp_path / "new_file" assert not new_file.exists() - state = f"""\ + state = f""" # The result of this call is irrelevant, just that it exists Some private key is present: x509.private_key_managed: From 33cf8a2dc794041c680a2ea54a19026468b7666f Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Fri, 11 Aug 2023 14:00:38 -0700 Subject: [PATCH 64/73] Account for times where __opts__ is not defined This can happen when we mock things in the test suite --- salt/loader/lazy.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/salt/loader/lazy.py b/salt/loader/lazy.py index c1bcace010f..9a4a8f8bf38 100644 --- a/salt/loader/lazy.py +++ b/salt/loader/lazy.py @@ -145,19 +145,24 @@ class LoadedFunc: def __call__(self, *args, **kwargs): run_func = self.func mod = sys.modules[run_func.__module__] - if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): - if mod.__opts__.value() is not None: - mod.__opts__.value()["test"] = self.loader.opts["test"] - else: - mod.__opts__["test"] = self.loader.opts["test"] + # All modules we've imported should have __opts__ defined. There are + # cases in the test suite where mod ends up being something other than + # a module we've loaded. + if hasattr(mod, "__opts__"): + if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): + if mod.__opts__.value() is not None: + mod.__opts__.value()["test"] = self.loader.opts["test"] + else: + mod.__opts__["test"] = self.loader.opts["test"] if self.loader.inject_globals: run_func = global_injector_decorator(self.loader.inject_globals)(run_func) ret = self.loader.run(run_func, *args, **kwargs) - if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): - if mod.__opts__.value() is not None: + if hasattr(mod, "__opts__"): + if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): + if mod.__opts__.value() is not None: + self.loader.opts["test"] = mod.__opts__["test"] + else: self.loader.opts["test"] = mod.__opts__["test"] - else: - self.loader.opts["test"] = mod.__opts__["test"] return ret def __repr__(self): From 3996876cb7e97f722c65656c200dbe7019115c9f Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 14 Aug 2023 14:33:37 -0700 Subject: [PATCH 65/73] Do not patch test if we have a named loader for __opts__ --- salt/loader/lazy.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/salt/loader/lazy.py b/salt/loader/lazy.py index 9a4a8f8bf38..8198b4f62e9 100644 --- a/salt/loader/lazy.py +++ b/salt/loader/lazy.py @@ -148,21 +148,17 @@ class LoadedFunc: # All modules we've imported should have __opts__ defined. There are # cases in the test suite where mod ends up being something other than # a module we've loaded. + set_test = False if hasattr(mod, "__opts__"): - if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): - if mod.__opts__.value() is not None: - mod.__opts__.value()["test"] = self.loader.opts["test"] - else: - mod.__opts__["test"] = self.loader.opts["test"] + if not isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): + if "test" in self.loader.opts: + mod.__opts__["test"] = self.loader.opts["test"] + set_test = True if self.loader.inject_globals: run_func = global_injector_decorator(self.loader.inject_globals)(run_func) ret = self.loader.run(run_func, *args, **kwargs) - if hasattr(mod, "__opts__"): - if isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): - if mod.__opts__.value() is not None: - self.loader.opts["test"] = mod.__opts__["test"] - else: - self.loader.opts["test"] = mod.__opts__["test"] + if set_test: + self.loader.opts["test"] = mod.__opts__["test"] return ret def __repr__(self): From 9d7bec1a6f8529f9beb329a5b7ba3b7bf8d8dcd6 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 10 Aug 2023 08:35:57 +0100 Subject: [PATCH 66/73] `salt-pip` now properly errors out when being called from a non `onedir` environment. 
Fixes #64249 Signed-off-by: Pedro Algarvio --- changelog/64249.fixed.md | 1 + salt/scripts.py | 22 +++++++++++-- tests/pytests/functional/cli/test_salt_pip.py | 31 +++++++++++++++++++ 3 files changed, 51 insertions(+), 3 deletions(-) create mode 100644 changelog/64249.fixed.md create mode 100644 tests/pytests/functional/cli/test_salt_pip.py diff --git a/changelog/64249.fixed.md b/changelog/64249.fixed.md new file mode 100644 index 00000000000..9f01a6146cb --- /dev/null +++ b/changelog/64249.fixed.md @@ -0,0 +1 @@ +`salt-pip` now properly errors out when being called from a non `onedir` environment. diff --git a/salt/scripts.py b/salt/scripts.py index 07393373c9d..176cab56366 100644 --- a/salt/scripts.py +++ b/salt/scripts.py @@ -1,8 +1,7 @@ """ This module contains the function calls to execute command line scripts """ - - +import contextlib import functools import logging import os @@ -608,11 +607,28 @@ def _pip_environment(env, extras): return new_env +def _get_onedir_env_path(): + # This function only exists to simplify testing. + with contextlib.suppress(AttributeError): + return sys.RELENV + return None + + def salt_pip(): """ Proxy to current python's pip """ - extras = str(sys.RELENV / "extras-{}.{}".format(*sys.version_info)) + relenv_path = _get_onedir_env_path() + if relenv_path is None: + print( + "'salt-pip' is only meant to be used from a Salt onedir. 
You probably " + "want to use the system 'pip` binary.", + file=sys.stderr, + flush=True, + ) + sys.exit(salt.defaults.exitcodes.EX_GENERIC) + else: + extras = str(relenv_path / "extras-{}.{}".format(*sys.version_info)) env = _pip_environment(os.environ.copy(), extras) args = _pip_args(sys.argv[1:], extras) command = [ diff --git a/tests/pytests/functional/cli/test_salt_pip.py b/tests/pytests/functional/cli/test_salt_pip.py new file mode 100644 index 00000000000..22284d8488a --- /dev/null +++ b/tests/pytests/functional/cli/test_salt_pip.py @@ -0,0 +1,31 @@ +import os + +import pytest + +import salt.scripts +import salt.utils.platform +from tests.conftest import CODE_DIR +from tests.support.mock import patch + + +def test_within_onedir_env(shell): + if os.environ.get("ONEDIR_TESTRUN", "0") == "0": + return + + script_name = "salt-pip" + if salt.utils.platform.is_windows(): + script_name += ".exe" + + script_path = CODE_DIR / "artifacts" / "salt" / script_name + assert script_path.exists() + + ret = shell.run(str(script_path), "list") + assert ret.returncode == 0 + + +def test_outside_onedir_env(capsys): + with patch("salt.scripts._get_onedir_env_path", return_value=None): + with pytest.raises(SystemExit) as exc: + salt.scripts.salt_pip() + captured = capsys.readouterr() + assert "'salt-pip' is only meant to be used from a Salt onedir." in captured.err From 05231cfe5c52bd4748098a047b3bb30971ccff70 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 14 Aug 2023 19:49:46 -0700 Subject: [PATCH 67/73] Bump relenv to 0.13.4 --- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/nightly.yml | 12 ++++++------ .github/workflows/scheduled.yml | 12 ++++++------ .github/workflows/staging.yml | 12 ++++++------ cicd/shared-gh-workflows-context.yml | 2 +- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ed1da2442e1..c55377efdd8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -442,7 +442,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-salt-onedir: @@ -458,7 +458,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-rpm-pkgs: @@ -470,7 +470,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-deb-pkgs: @@ -482,7 +482,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-windows-pkgs: @@ -494,7 +494,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" 
python-version: "3.10.12" build-macos-pkgs: @@ -506,7 +506,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" amazonlinux-2-pkg-tests: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e5944ed5b9f..ae20b22fc7d 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -491,7 +491,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-salt-onedir: @@ -507,7 +507,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-rpm-pkgs: @@ -519,7 +519,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-deb-pkgs: @@ -531,7 +531,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-windows-pkgs: @@ -543,7 +543,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" environment: nightly 
sign-packages: false @@ -558,7 +558,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" environment: nightly sign-packages: true diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 7dfa7db3274..9a22472b2da 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -476,7 +476,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-salt-onedir: @@ -492,7 +492,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-rpm-pkgs: @@ -504,7 +504,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-deb-pkgs: @@ -516,7 +516,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-windows-pkgs: @@ -528,7 +528,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-macos-pkgs: @@ -540,7 
+540,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" amazonlinux-2-pkg-tests: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index be2b1709e07..245b1bf1cef 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -486,7 +486,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-salt-onedir: @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-rpm-pkgs: @@ -514,7 +514,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-deb-pkgs: @@ -526,7 +526,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" build-windows-pkgs: @@ -538,7 +538,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" environment: staging sign-packages: ${{ inputs.sign-windows-packages }} @@ -553,7 +553,7 
@@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.3" + relenv-version: "0.13.4" python-version: "3.10.12" environment: staging sign-packages: true diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index da2158b216a..44a645e4e21 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,2 +1,2 @@ python_version: "3.10.12" -relenv_version: "0.13.3" +relenv_version: "0.13.4" From ad4821b87f85e70a979eb3c1c535ef21379efe61 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 16 Aug 2023 09:16:19 +0100 Subject: [PATCH 68/73] Run `pyupgrade` on the files changed on the merge forward Signed-off-by: Pedro Algarvio --- salt/_logging/impl.py | 49 ++++++--------- salt/beacons/__init__.py | 36 +++++------ salt/channel/server.py | 2 +- salt/crypt.py | 6 +- salt/runners/manage.py | 11 ++-- salt/scripts.py | 8 +-- salt/state.py | 58 +++++++++--------- salt/transport/base.py | 14 ++--- salt/utils/asynchronous.py | 2 +- salt/utils/event.py | 18 +++--- salt/utils/network.py | 36 +++++------ tests/pytests/conftest.py | 10 ++-- .../pytests/functional/channel/test_server.py | 2 +- tests/pytests/functional/cli/test_batch.py | 2 +- .../transport/ipc/test_pub_server_channel.py | 8 +-- .../pytests/integration/minion/test_reauth.py | 4 +- .../multimaster/test_offline_master.py | 4 +- .../unit/channel/test_request_channel.py | 59 ++++++++++++------- .../unit/transport/test_publish_client.py | 2 +- tests/pytests/unit/utils/test_gitfs.py | 4 +- tests/support/helpers.py | 24 ++++---- tests/support/pytest/transport.py | 2 +- 22 files changed, 172 insertions(+), 189 deletions(-) diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py index 6027e352909..f1302222df1 100644 --- a/salt/_logging/impl.py +++ b/salt/_logging/impl.py @@ -109,9 +109,9 @@ DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d 
[%(name)-17s:%(lineno)-4d][%(lev class SaltLogRecord(logging.LogRecord): def __init__(self, *args, **kwargs): logging.LogRecord.__init__(self, *args, **kwargs) - self.bracketname = "[{:<17}]".format(str(self.name)) - self.bracketlevel = "[{:<8}]".format(str(self.levelname)) - self.bracketprocess = "[{:>5}]".format(str(self.process)) + self.bracketname = f"[{str(self.name):<17}]" + self.bracketlevel = f"[{str(self.levelname):<8}]" + self.bracketprocess = f"[{str(self.process):>5}]" class SaltColorLogRecord(SaltLogRecord): @@ -125,11 +125,11 @@ class SaltColorLogRecord(SaltLogRecord): self.colorname = "{}[{:<17}]{}".format( LOG_COLORS["name"], str(self.name), reset ) - self.colorlevel = "{}[{:<8}]{}".format(clevel, str(self.levelname), reset) + self.colorlevel = f"{clevel}[{str(self.levelname):<8}]{reset}" self.colorprocess = "{}[{:>5}]{}".format( LOG_COLORS["process"], str(self.process), reset ) - self.colormsg = "{}{}{}".format(cmsg, self.getMessage(), reset) + self.colormsg = f"{cmsg}{self.getMessage()}{reset}" def get_log_record_factory(): @@ -275,27 +275,16 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta): else: extra["exc_info_on_loglevel"] = exc_info_on_loglevel - if sys.version_info < (3, 8): - LOGGING_LOGGER_CLASS._log( - self, - level, - msg, - args, - exc_info=exc_info, - extra=extra, - stack_info=stack_info, - ) - else: - LOGGING_LOGGER_CLASS._log( - self, - level, - msg, - args, - exc_info=exc_info, - extra=extra, - stack_info=stack_info, - stacklevel=stacklevel, - ) + LOGGING_LOGGER_CLASS._log( + self, + level, + msg, + args, + exc_info=exc_info, + extra=extra, + stack_info=stack_info, + stacklevel=stacklevel, + ) def makeRecord( self, @@ -738,7 +727,7 @@ def setup_logfile_handler( syslog_opts["address"] = str(path.resolve().parent) except OSError as exc: raise LoggingRuntimeError( - "Failed to setup the Syslog logging handler: {}".format(exc) + f"Failed to setup the Syslog logging handler: {exc}" ) from exc elif 
parsed_log_path.path: # In case of udp or tcp with a facility specified @@ -748,7 +737,7 @@ def setup_logfile_handler( # Logging facilities start with LOG_ if this is not the case # fail right now! raise LoggingRuntimeError( - "The syslog facility '{}' is not known".format(facility_name) + f"The syslog facility '{facility_name}' is not known" ) else: # This is the case of udp or tcp without a facility specified @@ -759,7 +748,7 @@ def setup_logfile_handler( # This python syslog version does not know about the user provided # facility name raise LoggingRuntimeError( - "The syslog facility '{}' is not known".format(facility_name) + f"The syslog facility '{facility_name}' is not known" ) syslog_opts["facility"] = facility @@ -779,7 +768,7 @@ def setup_logfile_handler( handler = SysLogHandler(**syslog_opts) except OSError as exc: raise LoggingRuntimeError( - "Failed to setup the Syslog logging handler: {}".format(exc) + f"Failed to setup the Syslog logging handler: {exc}" ) from exc else: # make sure, the logging directory exists and attempt to create it if necessary diff --git a/salt/beacons/__init__.py b/salt/beacons/__init__.py index c958087517a..deeb72f54f3 100644 --- a/salt/beacons/__init__.py +++ b/salt/beacons/__init__.py @@ -74,7 +74,7 @@ class Beacon: # Run the validate function if it's available, # otherwise there is a warning about it being missing - validate_str = "{}.validate".format(beacon_name) + validate_str = f"{beacon_name}.validate" if validate_str in self.beacons: valid, vcomment = self.beacons[validate_str](b_config[mod]) @@ -95,7 +95,7 @@ class Beacon: continue b_config[mod].append({"_beacon_name": mod}) - fun_str = "{}.beacon".format(beacon_name) + fun_str = f"{beacon_name}.beacon" if fun_str in self.beacons: runonce = self._determine_beacon_config( current_beacon_config, "run_once" @@ -124,7 +124,7 @@ class Beacon: if re.match("state.*", job["fun"]): is_running = True if is_running: - close_str = "{}.close".format(beacon_name) + close_str = 
f"{beacon_name}.close" if close_str in self.beacons: log.info("Closing beacon %s. State run in progress.", mod) self.beacons[close_str](b_config[mod]) @@ -139,7 +139,7 @@ class Beacon: try: raw = self.beacons[fun_str](b_config[mod]) except: # pylint: disable=bare-except - error = "{}".format(sys.exc_info()[1]) + error = f"{sys.exc_info()[1]}" log.error("Unable to start %s beacon, %s", mod, error) # send beacon error event tag = "salt/beacon/{}/{}/".format(self.opts["id"], mod) @@ -308,7 +308,7 @@ class Beacon: """ beacon_name = next(item.get("beacon_module", name) for item in beacon_data) - validate_str = "{}.validate".format(beacon_name) + validate_str = f"{beacon_name}.validate" # Run the validate function if it's available, # otherwise there is a warning about it being missing if validate_str in self.beacons: @@ -347,9 +347,9 @@ class Beacon: complete = False else: if name in self.opts["beacons"]: - comment = "Updating settings for beacon item: {}".format(name) + comment = f"Updating settings for beacon item: {name}" else: - comment = "Added new beacon item: {}".format(name) + comment = f"Added new beacon item: {name}" complete = True self.opts["beacons"].update(data) @@ -375,12 +375,10 @@ class Beacon: data[name] = beacon_data if name in self._get_beacons(include_opts=False): - comment = ( - "Cannot modify beacon item {}, it is configured in pillar.".format(name) - ) + comment = f"Cannot modify beacon item {name}, it is configured in pillar." complete = False else: - comment = "Updating settings for beacon item: {}".format(name) + comment = f"Updating settings for beacon item: {name}" complete = True self.opts["beacons"].update(data) @@ -402,16 +400,14 @@ class Beacon: """ if name in self._get_beacons(include_opts=False): - comment = ( - "Cannot delete beacon item {}, it is configured in pillar.".format(name) - ) + comment = f"Cannot delete beacon item {name}, it is configured in pillar." 
complete = False else: if name in self.opts["beacons"]: del self.opts["beacons"][name] - comment = "Deleting beacon item: {}".format(name) + comment = f"Deleting beacon item: {name}" else: - comment = "Beacon item {} not found.".format(name) + comment = f"Beacon item {name} not found." complete = True # Fire the complete event back along with updated list of beacons @@ -465,13 +461,11 @@ class Beacon: """ if name in self._get_beacons(include_opts=False): - comment = ( - "Cannot enable beacon item {}, it is configured in pillar.".format(name) - ) + comment = f"Cannot enable beacon item {name}, it is configured in pillar." complete = False else: self._update_enabled(name, True) - comment = "Enabling beacon item {}".format(name) + comment = f"Enabling beacon item {name}" complete = True # Fire the complete event back along with updated list of beacons @@ -501,7 +495,7 @@ class Beacon: complete = False else: self._update_enabled(name, False) - comment = "Disabling beacon item {}".format(name) + comment = f"Disabling beacon item {name}" complete = True # Fire the complete event back along with updated list of beacons diff --git a/salt/channel/server.py b/salt/channel/server.py index adf48db198e..d1f3a7ac1c1 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -134,7 +134,7 @@ class ReqServerChannel: raise tornado.gen.Return("bad load: id contains a null byte") except TypeError: log.error("Payload contains non-string id: %s", payload) - raise tornado.gen.Return("bad load: id {} is not a string".format(id_)) + raise tornado.gen.Return(f"bad load: id {id_} is not a string") version = 0 if "version" in payload: diff --git a/salt/crypt.py b/salt/crypt.py index 6aa30fe258c..a509e496495 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -122,8 +122,8 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None): :return: Path on the filesystem to the RSA private key """ base = os.path.join(keydir, keyname) - priv = "{}.pem".format(base) - pub = 
"{}.pub".format(base) + priv = f"{base}.pem" + pub = f"{base}.pub" if HAS_M2: gen = RSA.gen_key(keysize, 65537, lambda: None) @@ -443,7 +443,7 @@ class MasterKeys(dict): try: key = get_rsa_key(path, passphrase) except key_error as e: - message = "Unable to read key: {}; passphrase may be incorrect".format(path) + message = f"Unable to read key: {path}; passphrase may be incorrect" log.error(message) raise MasterExit(message) log.debug("Loaded %s key: %s", name, path) diff --git a/salt/runners/manage.py b/salt/runners/manage.py index 3a0a201a4ef..b15573ed168 100644 --- a/salt/runners/manage.py +++ b/salt/runners/manage.py @@ -538,7 +538,7 @@ def safe_accept(target, tgt_type="glob"): del ret[minion] continue elif minion not in pending: - failures[minion] = "Minion key {} not found by salt-key".format(minion) + failures[minion] = f"Minion key {minion} not found by salt-key" elif pending[minion] != finger: failures[ minion @@ -560,7 +560,8 @@ def safe_accept(target, tgt_type="glob"): print("") __jid_event__.fire_event( - {"message": "Accepted {:d} keys".format(len(ret))}, "progress" + {"message": f"Accepted {len(ret):d} keys"}, + "progress", ) return ret, failures @@ -730,8 +731,8 @@ def bootstrap( client_opts["argv"] = [ "http.query", script, - "backend={}".format(http_backend), - "text_out={}".format(deploy_command), + f"backend={http_backend}", + f"text_out={deploy_command}", ] salt.client.ssh.SSH(client_opts).run() client_opts["argv"] = [ @@ -846,7 +847,7 @@ objShell.Exec("{1}{2}")""" vb_saltexec = "saltinstall.exe" vb_saltexec_args = " /S /minion-name=%COMPUTERNAME%" if master: - vb_saltexec_args += " /master={}".format(master) + vb_saltexec_args += f" /master={master}" # One further thing we need to do; the Windows Salt minion is pretty # self-contained, except for the Microsoft Visual C++ 2008 runtime. 
diff --git a/salt/scripts.py b/salt/scripts.py index 176cab56366..cc960f07854 100644 --- a/salt/scripts.py +++ b/salt/scripts.py @@ -414,7 +414,7 @@ def salt_key(): _install_signal_handlers(client) client.run() except Exception as err: # pylint: disable=broad-except - sys.stderr.write("Error: {}\n".format(err)) + sys.stderr.write(f"Error: {err}\n") def salt_cp(): @@ -572,7 +572,7 @@ def salt_unity(): if len(sys.argv) < 2: msg = "Must pass in a salt command, available commands are:" for cmd in avail: - msg += "\n{}".format(cmd) + msg += f"\n{cmd}" print(msg) sys.exit(1) cmd = sys.argv[1] @@ -581,9 +581,9 @@ def salt_unity(): sys.argv[0] = "salt" s_fun = salt_main else: - sys.argv[0] = "salt-{}".format(cmd) + sys.argv[0] = f"salt-{cmd}" sys.argv.pop(1) - s_fun = getattr(sys.modules[__name__], "salt_{}".format(cmd)) + s_fun = getattr(sys.modules[__name__], f"salt_{cmd}") s_fun() diff --git a/salt/state.py b/salt/state.py index 1774e07061d..ca3e295e558 100644 --- a/salt/state.py +++ b/salt/state.py @@ -158,8 +158,8 @@ def _clean_tag(tag): def _l_tag(name, id_): low = { - "name": "listen_{}".format(name), - "__id__": "listen_{}".format(id_), + "name": f"listen_{name}", + "__id__": f"listen_{id_}", "state": "Listen_Error", "fun": "Listen_Error", } @@ -1065,7 +1065,7 @@ class State: return ret elif isinstance(entry, dict): if "fun" not in entry: - ret["comment"] = "no `fun` argument in onlyif: {}".format(entry) + ret["comment"] = f"no `fun` argument in onlyif: {entry}" log.warning(ret["comment"]) return ret @@ -1143,7 +1143,7 @@ class State: return ret elif isinstance(entry, dict): if "fun" not in entry: - ret["comment"] = "no `fun` argument in unless: {}".format(entry) + ret["comment"] = f"no `fun` argument in unless: {entry}" log.warning(ret["comment"]) return ret @@ -1393,9 +1393,9 @@ class State: ) reason = self.states.missing_fun_string(full) if reason: - errors.append("Reason: {}".format(reason)) + errors.append(f"Reason: {reason}") else: - errors.append("Specified 
state '{}' was not found".format(full)) + errors.append(f"Specified state '{full}' was not found") else: # First verify that the parameters are met aspec = salt.utils.args.get_function_argspec(self.states[full]) @@ -2122,7 +2122,7 @@ class State: "result": False, "name": name, "changes": {}, - "comment": "An exception occurred in this state: {}".format(trb), + "comment": f"An exception occurred in this state: {trb}", } utc_finish_time = datetime.datetime.utcnow() @@ -2228,7 +2228,7 @@ class State: proc = salt.utils.process.Process( target=self._call_parallel_target, args=(instance, self._init_kwargs, name, cdata, low), - name="ParallelState({})".format(name), + name=f"ParallelState({name})", ) proc.start() ret = { @@ -2264,7 +2264,7 @@ class State: "comment": "", } for err in errors: - ret["comment"] += "{}\n".format(err) + ret["comment"] += f"{err}\n" ret["__run_num__"] = self.__run_num self.__run_num += 1 format_log(ret) @@ -2411,7 +2411,7 @@ class State: "result": False, "name": name, "changes": {}, - "comment": "An exception occurred in this state: {}".format(trb), + "comment": f"An exception occurred in this state: {trb}", } finally: if low.get("__prereq__"): @@ -3423,7 +3423,7 @@ class State: lkey, lval ) ), - "name": "listen_{}:{}".format(lkey, lval), + "name": f"listen_{lkey}:{lval}", "result": False, "changes": {}, } @@ -3538,9 +3538,7 @@ class State: return high, errors if not isinstance(high, dict): - errors.append( - "Template {} does not render to a dictionary".format(template) - ) + errors.append(f"Template {template} does not render to a dictionary") return high, errors invalid_items = ("include", "exclude", "extends") @@ -3915,10 +3913,10 @@ class BaseHighState: """ merging_strategy = self.opts["top_file_merging_strategy"] try: - merge_attr = "_merge_tops_{}".format(merging_strategy) + merge_attr = f"_merge_tops_{merging_strategy}" merge_func = getattr(self, merge_attr) if not hasattr(merge_func, "__call__"): - msg = "'{}' is not 
callable".format(merge_attr) + msg = f"'{merge_attr}' is not callable" log.error(msg) raise TypeError(msg) except (AttributeError, TypeError): @@ -4246,7 +4244,7 @@ class BaseHighState: fn_ = sls if not os.path.isfile(fn_): errors.append( - "Specified SLS {} on local filesystem cannot be found.".format(sls) + f"Specified SLS {sls} on local filesystem cannot be found." ) state = None if not fn_: @@ -4269,25 +4267,25 @@ class BaseHighState: context=context, ) except SaltRenderError as exc: - msg = "Rendering SLS '{}:{}' failed: {}".format(saltenv, sls, exc) + msg = f"Rendering SLS '{saltenv}:{sls}' failed: {exc}" log.critical(msg) errors.append(msg) except Exception as exc: # pylint: disable=broad-except - msg = "Rendering SLS {} failed, render error: {}".format(sls, exc) + msg = f"Rendering SLS {sls} failed, render error: {exc}" log.critical( msg, # Show the traceback if the debug logging level is enabled exc_info_on_loglevel=logging.DEBUG, ) - errors.append("{}\n{}".format(msg, traceback.format_exc())) + errors.append(f"{msg}\n{traceback.format_exc()}") try: - mods.add("{}:{}".format(saltenv, sls)) + mods.add(f"{saltenv}:{sls}") except AttributeError: pass if state: if not isinstance(state, dict): - errors.append("SLS {} does not render to a dictionary".format(sls)) + errors.append(f"SLS {sls} does not render to a dictionary") else: include = [] if "include" in state: @@ -4390,7 +4388,7 @@ class BaseHighState: r_env = ( resolved_envs[0] if len(resolved_envs) == 1 else saltenv ) - mod_tgt = "{}:{}".format(r_env, sls_target) + mod_tgt = f"{r_env}:{sls_target}" if mod_tgt not in mods: nstate, err = self.render_state( sls_target, @@ -4490,7 +4488,7 @@ class BaseHighState: comps[0]: [comps[1]], } continue - errors.append("ID {} in SLS {} is not a dictionary".format(name, sls)) + errors.append(f"ID {name} in SLS {sls} is not a dictionary") continue skeys = set() for key in list(state[name]): @@ -4534,9 +4532,7 @@ class BaseHighState: if "extend" in state: ext = 
state.pop("extend") if not isinstance(ext, dict): - errors.append( - "Extension value in SLS '{}' is not a dictionary".format(sls) - ) + errors.append(f"Extension value in SLS '{sls}' is not a dictionary") return for name in ext: if not isinstance(ext[name], dict): @@ -4603,7 +4599,7 @@ class BaseHighState: statefiles = [sls_match] for sls in statefiles: - r_env = "{}:{}".format(saltenv, sls) + r_env = f"{saltenv}:{sls}" if r_env in mods: continue state, errors = self.render_state( @@ -4614,7 +4610,7 @@ class BaseHighState: for i, error in enumerate(errors[:]): if "is not available" in error: # match SLS foobar in environment - this_sls = "SLS {} in saltenv".format(sls_match) + this_sls = f"SLS {sls_match} in saltenv" if this_sls in error: errors[ i @@ -4659,7 +4655,7 @@ class BaseHighState: try: highstate.update(state) except ValueError: - errors.append("Error when rendering state with contents: {}".format(state)) + errors.append(f"Error when rendering state with contents: {state}") def _check_pillar(self, force=False): """ @@ -4712,7 +4708,7 @@ class BaseHighState: "__run_num__": 0, } } - cfn = os.path.join(self.opts["cachedir"], "{}.cache.p".format(cache_name)) + cfn = os.path.join(self.opts["cachedir"], f"{cache_name}.cache.p") if cache: if os.path.isfile(cfn): diff --git a/salt/transport/base.py b/salt/transport/base.py index 78d5f11dd32..cd999bd1496 100644 --- a/salt/transport/base.py +++ b/salt/transport/base.py @@ -90,7 +90,7 @@ def publish_server(opts, **kwargs): import salt.transport.local return salt.transport.local.LocalPubServerChannel(opts, **kwargs) - raise Exception("Transport type not found: {}".format(ttype)) + raise Exception(f"Transport type not found: {ttype}") def publish_client(opts, io_loop, host=None, port=None, path=None, transport=None): @@ -118,7 +118,7 @@ def publish_client(opts, io_loop, host=None, port=None, path=None, transport=Non opts, io_loop, host=host, port=port, path=path ) - raise Exception("Transport type not found: 
{}".format(ttype)) + raise Exception(f"Transport type not found: {ttype}") def _minion_hash(hash_type, minion_id): @@ -154,9 +154,7 @@ def ipc_publish_client(node, opts, io_loop): minion_id=opts.get("hash_id", opts["id"]), ) kwargs.update( - path=os.path.join( - opts["sock_dir"], "minion_event_{}_pub.ipc".format(id_hash) - ) + path=os.path.join(opts["sock_dir"], f"minion_event_{id_hash}_pub.ipc") ) return publish_client(opts, io_loop, **kwargs) @@ -190,13 +188,11 @@ def ipc_publish_server(node, opts): hash_type=opts["hash_type"], minion_id=opts.get("hash_id", opts["id"]), ) - pub_path = os.path.join( - opts["sock_dir"], "minion_event_{}_pub.ipc".format(id_hash) - ) + pub_path = os.path.join(opts["sock_dir"], f"minion_event_{id_hash}_pub.ipc") kwargs.update( pub_path=pub_path, pull_path=os.path.join( - opts["sock_dir"], "minion_event_{}_pull.ipc".format(id_hash) + opts["sock_dir"], f"minion_event_{id_hash}_pull.ipc" ), ) return publish_server(opts, **kwargs) diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py index a5090472b7d..f983a4ec64f 100644 --- a/salt/utils/asynchronous.py +++ b/salt/utils/asynchronous.py @@ -92,7 +92,7 @@ class SyncWrapper: self._async_methods += self.obj._coroutines def __repr__(self): - return " 1: - raise ValueError( - 'found ambiguous "{}" port in "{}"'.format(_s_, host_port) - ) + raise ValueError(f'found ambiguous "{_s_}" port in "{host_port}"') else: if _s_.count(":") == 1: host, _hostport_separator_, port = _s_.partition(":") @@ -2308,7 +2302,7 @@ def parse_host_port(host_port): log.debug('"%s" Not an IP address? 
Assuming it is a hostname.', host) if host != sanitize_host(host): log.error('bad hostname: "%s"', host) - raise ValueError('bad hostname: "{}"'.format(host)) + raise ValueError(f'bad hostname: "{host}"') return host, port @@ -2339,7 +2333,7 @@ def filter_by_networks(values, networks): elif isinstance(values, Sequence): return _filter(values, networks) else: - raise ValueError("Do not know how to filter a {}".format(type(values))) + raise ValueError(f"Do not know how to filter a {type(values)}") else: return values diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 721972b7337..aded4e1ab08 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -164,7 +164,7 @@ def salt_master_factory( "redundant_minions": "N@min or N@mins", "nodegroup_loop_a": "N@nodegroup_loop_b", "nodegroup_loop_b": "N@nodegroup_loop_a", - "missing_minion": "L@{},ghostminion".format(salt_minion_id), + "missing_minion": f"L@{salt_minion_id},ghostminion", "list_group": "N@multiline_nodegroup", "one_list_group": "N@one_minion_list", "list_group2": "N@list_nodegroup", @@ -176,7 +176,7 @@ def salt_master_factory( "etcd.port": sdb_etcd_port, } config_defaults["vault"] = { - "url": "http://127.0.0.1:{}".format(vault_port), + "url": f"http://127.0.0.1:{vault_port}", "auth": {"method": "token", "token": "testsecret", "uses": 0}, "policies": ["testpolicy"], } @@ -210,7 +210,7 @@ def salt_master_factory( config_overrides["external_auth"] = { "pam": { salt_auth_account_1_factory.username: ["test.*"], - "{}%".format(salt_auth_account_2_factory.group_name): [ + f"{salt_auth_account_2_factory.group_name}%": [ "@wheel", "@runner", "test.*", @@ -308,7 +308,7 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul "etcd.port": sdb_etcd_port, } config_defaults["vault"] = { - "url": "http://127.0.0.1:{}".format(vault_port), + "url": f"http://127.0.0.1:{vault_port}", "auth": {"method": "token", "token": "testsecret", "uses": 0}, "policies": 
["testpolicy"], } @@ -506,7 +506,7 @@ def tmp_path_world_rw(request): Temporary path which is world read/write for tests that run under a different account """ tempdir_path = pathlib.Path(basetemp=tempfile.gettempdir()).resolve() - path = tempdir_path / "world-rw-{}".format(id(request.node)) + path = tempdir_path / f"world-rw-{id(request.node)}" path.mkdir(exist_ok=True) path.chmod(0o777) try: diff --git a/tests/pytests/functional/channel/test_server.py b/tests/pytests/functional/channel/test_server.py index 559ab645327..a6343e0fd64 100644 --- a/tests/pytests/functional/channel/test_server.py +++ b/tests/pytests/functional/channel/test_server.py @@ -50,7 +50,7 @@ def root_dir(tmp_path): def transport_ids(value): - return "transport({})".format(value) + return f"transport({value})" @pytest.fixture(params=["tcp", "zeromq"], ids=transport_ids) diff --git a/tests/pytests/functional/cli/test_batch.py b/tests/pytests/functional/cli/test_batch.py index 50619735c87..adc5406737a 100644 --- a/tests/pytests/functional/cli/test_batch.py +++ b/tests/pytests/functional/cli/test_batch.py @@ -140,7 +140,7 @@ class MockSubscriber: }, use_bin_type=True, ) - tag = "salt/job/{}/ret".format(jid).encode() + tag = f"salt/job/{jid}/ret".encode() return b"".join([tag, b"\n\n", dumped]) def connect(self, timeout=None): diff --git a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py index 65fbfb0ae28..431830d4709 100644 --- a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py @@ -104,7 +104,7 @@ def test_issue_36469_tcp(salt_master, salt_minion, transport): load = { "tgt_type": "glob", "tgt": "*", - "jid": "{}-s{}".format(sid, idx), + "jid": f"{sid}-s{idx}", } await server_channel.publish(load) @@ -125,7 +125,7 @@ def test_issue_36469_tcp(salt_master, salt_minion, transport): load = { "tgt_type": "glob", "tgt": "*", - "jid": 
"{}-l{}".format(sid, idx), + "jid": f"{sid}-l{idx}", "xdata": "0" * size, } await server_channel.publish(load) @@ -146,8 +146,8 @@ def test_issue_36469_tcp(salt_master, salt_minion, transport): executor.submit(_send_large, opts, 2) executor.submit(_send_small, opts, 3) executor.submit(_send_large, opts, 4) - expect.extend(["{}-s{}".format(a, b) for a in range(10) for b in (1, 3)]) - expect.extend(["{}-l{}".format(a, b) for a in range(10) for b in (2, 4)]) + expect.extend([f"{a}-s{b}" for a in range(10) for b in (1, 3)]) + expect.extend([f"{a}-l{b}" for a in range(10) for b in (2, 4)]) results = server_channel.collector.results assert len(results) == send_num, "{} != {}, difference: {}".format( len(results), send_num, set(expect).difference(results) diff --git a/tests/pytests/integration/minion/test_reauth.py b/tests/pytests/integration/minion/test_reauth.py index 14a1e873551..24b256502e8 100644 --- a/tests/pytests/integration/minion/test_reauth.py +++ b/tests/pytests/integration/minion/test_reauth.py @@ -25,9 +25,7 @@ def test_reauth(salt_master_factory, event_listener): "test_reauth-master", overrides={"log_level": "info"}, ) - sls_tempfile = master.state_tree.base.temp_file( - "{}.sls".format(sls_name), sls_contents - ) + sls_tempfile = master.state_tree.base.temp_file(f"{sls_name}.sls", sls_contents) minion = master.salt_minion_daemon( "test_reauth-minion", overrides={"log_level": "info"}, diff --git a/tests/pytests/scenarios/multimaster/test_offline_master.py b/tests/pytests/scenarios/multimaster/test_offline_master.py index c8ea679328b..8dddb0f6808 100644 --- a/tests/pytests/scenarios/multimaster/test_offline_master.py +++ b/tests/pytests/scenarios/multimaster/test_offline_master.py @@ -22,7 +22,7 @@ def test_minion_hangs_on_master_failure_50814( event_count = 3 while True: check_event_start_time = time.time() - event_tag = "myco/foo/bar/{}".format(event_count) + event_tag = f"myco/foo/bar/{event_count}" ret = mm_master_2_salt_cli.run( "event.send", event_tag, 
minion_tgt=salt_mm_minion_1.id ) @@ -69,7 +69,7 @@ def test_minion_hangs_on_master_failure_50814( event_count = 1 while True: check_event_start_time = time.time() - event_tag = "myco/foo/bar/{}".format(event_count) + event_tag = f"myco/foo/bar/{event_count}" ret = mm_master_2_salt_cli.run( "event.send", event_tag, minion_tgt=salt_mm_minion_1.id ) diff --git a/tests/pytests/unit/channel/test_request_channel.py b/tests/pytests/unit/channel/test_request_channel.py index 148cbc32b85..b2245dc148e 100644 --- a/tests/pytests/unit/channel/test_request_channel.py +++ b/tests/pytests/unit/channel/test_request_channel.py @@ -275,36 +275,53 @@ def test_master_uri(): "salt.transport.zeromq.ZMQ_VERSION_INFO", (16, 0, 1) ): # pass in both source_ip and source_port - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip, master_port=m_port, source_ip=s_ip, source_port=s_port - ) == "tcp://{}:{};{}:{}".format(s_ip, s_port, m_ip, m_port) + assert ( + salt.transport.zeromq._get_master_uri( + master_ip=m_ip, master_port=m_port, source_ip=s_ip, source_port=s_port + ) + == f"tcp://{s_ip}:{s_port};{m_ip}:{m_port}" + ) - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip6, master_port=m_port, source_ip=s_ip6, source_port=s_port - ) == "tcp://[{}]:{};[{}]:{}".format(s_ip6, s_port, m_ip6, m_port) + assert ( + salt.transport.zeromq._get_master_uri( + master_ip=m_ip6, master_port=m_port, source_ip=s_ip6, source_port=s_port + ) + == f"tcp://[{s_ip6}]:{s_port};[{m_ip6}]:{m_port}" + ) # source ip and source_port empty - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip, master_port=m_port - ) == "tcp://{}:{}".format(m_ip, m_port) + assert ( + salt.transport.zeromq._get_master_uri(master_ip=m_ip, master_port=m_port) + == f"tcp://{m_ip}:{m_port}" + ) - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip6, master_port=m_port - ) == "tcp://[{}]:{}".format(m_ip6, m_port) + assert ( + salt.transport.zeromq._get_master_uri(master_ip=m_ip6, 
master_port=m_port) + == f"tcp://[{m_ip6}]:{m_port}" + ) # pass in only source_ip - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip, master_port=m_port, source_ip=s_ip - ) == "tcp://{}:0;{}:{}".format(s_ip, m_ip, m_port) + assert ( + salt.transport.zeromq._get_master_uri( + master_ip=m_ip, master_port=m_port, source_ip=s_ip + ) + == f"tcp://{s_ip}:0;{m_ip}:{m_port}" + ) - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip6, master_port=m_port, source_ip=s_ip6 - ) == "tcp://[{}]:0;[{}]:{}".format(s_ip6, m_ip6, m_port) + assert ( + salt.transport.zeromq._get_master_uri( + master_ip=m_ip6, master_port=m_port, source_ip=s_ip6 + ) + == f"tcp://[{s_ip6}]:0;[{m_ip6}]:{m_port}" + ) # pass in only source_port - assert salt.transport.zeromq._get_master_uri( - master_ip=m_ip, master_port=m_port, source_port=s_port - ) == "tcp://0.0.0.0:{};{}:{}".format(s_port, m_ip, m_port) + assert ( + salt.transport.zeromq._get_master_uri( + master_ip=m_ip, master_port=m_port, source_port=s_port + ) + == f"tcp://0.0.0.0:{s_port};{m_ip}:{m_port}" + ) def test_clear_req_channel_master_uri_override(temp_salt_minion, temp_salt_master): diff --git a/tests/pytests/unit/transport/test_publish_client.py b/tests/pytests/unit/transport/test_publish_client.py index e7f82f578c5..3a4ee21a9fd 100644 --- a/tests/pytests/unit/transport/test_publish_client.py +++ b/tests/pytests/unit/transport/test_publish_client.py @@ -23,7 +23,7 @@ pytestmark = [ def transport_ids(value): - return "Transport({})".format(value) + return f"Transport({value})" @pytest.fixture(params=("zeromq", "tcp"), ids=transport_ids) diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py index 76c9409a1af..fd2c45cfee6 100644 --- a/tests/pytests/unit/utils/test_gitfs.py +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -55,7 +55,7 @@ def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_c Ensure that both lowercase and non-lowercase values are supported 
""" provider = "GitPython" - key = "{}_provider".format(role_name) + key = f"{role_name}_provider" with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)): with patch.object(role_class, "verify_pygit2", MagicMock(return_value=False)): args = [minion_opts, {}] @@ -93,7 +93,7 @@ def test_valid_provider_gitfs_provider(minion_opts, role_name, role_class): """ return MagicMock(return_value=verify.endswith(provider)) - key = "{}_provider".format(role_name) + key = f"{role_name}_provider" for provider in salt.utils.gitfs.GIT_PROVIDERS: verify = "verify_gitpython" mock1 = _get_mock(verify, provider) diff --git a/tests/support/helpers.py b/tests/support/helpers.py index d1ba77a2c56..d8b7f2915f8 100644 --- a/tests/support/helpers.py +++ b/tests/support/helpers.py @@ -74,7 +74,7 @@ def no_symlinks(): ret = subprocess.run( ["git", "config", "--get", "core.symlinks"], shell=False, - universal_newlines=True, + text=True, cwd=RUNTIME_VARS.CODE_DIR, stdout=subprocess.PIPE, check=False, @@ -509,7 +509,7 @@ class ForceImportErrorOn: if name in self.__module_names: importerror_fromlist = self.__module_names.get(name) if importerror_fromlist is None: - raise ImportError("Forced ImportError raised for {!r}".format(name)) + raise ImportError(f"Forced ImportError raised for {name!r}") if importerror_fromlist.intersection(set(fromlist)): raise ImportError( @@ -699,7 +699,7 @@ def with_system_user( log.debug("Failed to create system user") # The user was not created if on_existing == "skip": - cls.skipTest("Failed to create system user {!r}".format(username)) + cls.skipTest(f"Failed to create system user {username!r}") if on_existing == "delete": log.debug("Deleting the system user %r", username) @@ -727,7 +727,7 @@ def with_system_user( hashed_password = password else: hashed_password = salt.utils.pycrypto.gen_hash(password=password) - hashed_password = "'{}'".format(hashed_password) + hashed_password = f"'{hashed_password}'" add_pwd = cls.run_function( 
"shadow.set_password", [username, hashed_password] ) @@ -806,7 +806,7 @@ def with_system_group(group, on_existing="delete", delete=True): log.debug("Failed to create system group") # The group was not created if on_existing == "skip": - cls.skipTest("Failed to create system group {!r}".format(group)) + cls.skipTest(f"Failed to create system group {group!r}") if on_existing == "delete": log.debug("Deleting the system group %r", group) @@ -903,7 +903,7 @@ def with_system_user_and_group(username, group, on_existing="delete", delete=Tru log.debug("Failed to create system user") # The user was not created if on_existing == "skip": - cls.skipTest("Failed to create system user {!r}".format(username)) + cls.skipTest(f"Failed to create system user {username!r}") if on_existing == "delete": log.debug("Deleting the system user %r", username) @@ -930,7 +930,7 @@ def with_system_user_and_group(username, group, on_existing="delete", delete=Tru log.debug("Failed to create system group") # The group was not created if on_existing == "skip": - cls.skipTest("Failed to create system group {!r}".format(group)) + cls.skipTest(f"Failed to create system group {group!r}") if on_existing == "delete": log.debug("Deleting the system group %r", group) @@ -1099,7 +1099,7 @@ def _check_required_sminion_attributes(sminion_attr, *required_items): available_items = list(getattr(sminion, sminion_attr)) not_available_items = set() - name = "__not_available_{items}s__".format(items=sminion_attr) + name = f"__not_available_{sminion_attr}s__" if not hasattr(sminion, name): setattr(sminion, name, set()) @@ -1181,13 +1181,13 @@ def skip_if_binaries_missing(*binaries, **kwargs): if salt.utils.path.which(binary) is None: return skip( "{}The {!r} binary was not found".format( - message and "{}. ".format(message) or "", binary + message and f"{message}. 
" or "", binary ) ) elif salt.utils.path.which_bin(binaries) is None: return skip( "{}None of the following binaries was found: {}".format( - message and "{}. ".format(message) or "", ", ".join(binaries) + message and f"{message}. " or "", ", ".join(binaries) ) ) return _id @@ -1747,7 +1747,7 @@ class VirtualEnv: pytest.fail("'virtualenv' binary not found") cmd = [ virtualenv, - "--python={}".format(self.get_real_python()), + f"--python={self.get_real_python()}", ] if self.system_site_packages: cmd.append("--system-site-packages") @@ -1880,7 +1880,7 @@ class Keys: @property def pub_path(self): - return self.priv_path.with_name("{}.pub".format(self.priv_path.name)) + return self.priv_path.with_name(f"{self.priv_path.name}.pub") @property def pub(self): diff --git a/tests/support/pytest/transport.py b/tests/support/pytest/transport.py index 26397d3952a..1835590275f 100644 --- a/tests/support/pytest/transport.py +++ b/tests/support/pytest/transport.py @@ -87,7 +87,7 @@ class Collector(salt.utils.process.SignalHandlingProcess): self.sock = self.ctx.socket(zmq.SUB) self.sock.setsockopt(zmq.LINGER, -1) self.sock.setsockopt(zmq.SUBSCRIBE, b"") - pub_uri = "tcp://{}:{}".format(self.interface, self.port) + pub_uri = f"tcp://{self.interface}:{self.port}" log.info("Collector listen %s", pub_uri) self.sock.connect(pub_uri) else: From e8a392577d65203d21fb5074224b35c85d43f7f3 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 17 Aug 2023 09:54:54 +0100 Subject: [PATCH 69/73] Fix merged forwarded code and test Signed-off-by: Pedro Algarvio --- salt/transport/zeromq.py | 7 ++++--- tests/pytests/unit/transport/test_zeromq.py | 10 ++-------- tests/support/mock.py | 5 +++-- 3 files changed, 9 insertions(+), 13 deletions(-) diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index d2105a61b88..373399c5230 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -543,14 +543,15 @@ class RequestServer(salt.transport.base.DaemonizedRequestServer): await 
self._socket.send(self.encode_payload(reply)) except asyncio.exceptions.TimeoutError: continue - except salt.exceptions.SaltDeserializationError: - await self._socket.send(self.encode_payload({"msg": "bad load"})) except Exception as exc: # pylint: disable=broad-except log.error("Exception in request handler", exc_info=True) break async def handle_message(self, stream, payload): - payload = self.decode_payload(payload) + try: + payload = self.decode_payload(payload) + except salt.exceptions.SaltDeserializationError: + return {"msg": "bad load"} return await self.message_handler(payload) def encode_payload(self, payload): diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 1de55463baa..e8304eede00 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -3,7 +3,6 @@ import pytest import salt.config import salt.transport.zeromq -from tests.support.mock import MagicMock async def test_req_server_garbage_request(io_loop): @@ -22,14 +21,9 @@ async def test_req_server_garbage_request(io_loop): byts = msgpack.dumps({"foo": "bar"}) badbyts = byts[:3] + b"^M" + byts[3:] - valid_response = msgpack.dumps("Invalid payload") - - stream = MagicMock() - request_server.stream = stream - try: - await request_server.handle_message(stream, badbyts) + ret = await request_server.handle_message(None, badbyts) except Exception as exc: # pylint: disable=broad-except pytest.fail(f"Exception was raised {exc}") - request_server.stream.send.assert_called_once_with(valid_response) + assert ret == {"msg": "bad load"} diff --git a/tests/support/mock.py b/tests/support/mock.py index 2256ad8f5da..2f9970d4b04 100644 --- a/tests/support/mock.py +++ b/tests/support/mock.py @@ -27,6 +27,7 @@ from mock import ( ANY, DEFAULT, FILTER_DIR, + AsyncMock, MagicMock, Mock, NonCallableMagicMock, @@ -202,7 +203,7 @@ class MockFH: ) elif not self.binary_mode and content_type is not str: raise TypeError( 
- "write() argument must be str, not {}".format(content_type.__name__) + f"write() argument must be str, not {content_type.__name__}" ) def _writelines(self, lines): @@ -233,7 +234,7 @@ class MockCall: ret = ret[:-2] else: for key, val in self.kwargs.items(): - ret += "{}={}".format(salt.utils.stringutils.to_str(key), repr(val)) + ret += f"{salt.utils.stringutils.to_str(key)}={repr(val)}" ret += ")" return ret From a6cb942a3b952cab57ec888b7548270f3df4a5fd Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 17 Aug 2023 20:34:21 +0100 Subject: [PATCH 70/73] Add back code logic that `pyupgrade` removed. We still need this for salt-ssh Signed-off-by: Pedro Algarvio --- salt/_logging/impl.py | 33 +++++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py index f1302222df1..7b3ab99631a 100644 --- a/salt/_logging/impl.py +++ b/salt/_logging/impl.py @@ -275,16 +275,29 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta): else: extra["exc_info_on_loglevel"] = exc_info_on_loglevel - LOGGING_LOGGER_CLASS._log( - self, - level, - msg, - args, - exc_info=exc_info, - extra=extra, - stack_info=stack_info, - stacklevel=stacklevel, - ) + try: + LOGGING_LOGGER_CLASS._log( + self, + level, + msg, + args, + exc_info=exc_info, + extra=extra, + stack_info=stack_info, + stacklevel=stacklevel, + ) + except TypeError: + # Python < 3.8 - We still need this for salt-ssh since it will use + # the system python, and not out onedir. + LOGGING_LOGGER_CLASS._log( + self, + level, + msg, + args, + exc_info=exc_info, + extra=extra, + stack_info=stack_info, + ) def makeRecord( self, From 673e6ecccbdcc1416b405af0f748b69dc24467ee Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 21 Aug 2023 16:12:01 -0700 Subject: [PATCH 71/73] Remove dead code reference --- tests/support/pytest/transport.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/support/pytest/transport.py b/tests/support/pytest/transport.py index 1835590275f..8e9b4504169 100644 --- a/tests/support/pytest/transport.py +++ b/tests/support/pytest/transport.py @@ -137,7 +137,6 @@ class Collector(salt.utils.process.SignalHandlingProcess): self.started.set() last_msg = time.time() self.start = last_msg - serial = salt.payload.Serial(self.minion_config) crypticle = salt.crypt.Crypticle(self.minion_config, self.aes_key) self.gotone = False try: From 07033471bdcfcdca069e8931644e095e24c9bc51 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 23 Aug 2023 12:38:32 +0100 Subject: [PATCH 72/73] Close the `inotify` and `watchdog` instances after each test --- tests/pytests/unit/beacons/test_inotify.py | 12 ++++++++++-- tests/pytests/unit/beacons/test_watchdog.py | 8 ++++++++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/beacons/test_inotify.py b/tests/pytests/unit/beacons/test_inotify.py index 30a9a91db4b..07907cecfc2 100644 --- a/tests/pytests/unit/beacons/test_inotify.py +++ b/tests/pytests/unit/beacons/test_inotify.py @@ -26,6 +26,14 @@ def configure_loader_modules(): return {inotify: {}} +@pytest.fixture(autouse=True) +def _close_inotify(configure_loader_modules): + try: + yield + finally: + inotify.close({}) + + def test_non_list_config(): config = {} @@ -212,8 +220,8 @@ def test_multi_files_exclude(tmp_path): dp2 = str(tmp_path / "subdir2") os.mkdir(dp1) os.mkdir(dp2) - _exclude1 = "{}/subdir1/*tmpfile*$".format(str(tmp_path)) - _exclude2 = "{}/subdir2/*filetmp*$".format(str(tmp_path)) + _exclude1 = f"{str(tmp_path)}/subdir1/*tmpfile*$" + _exclude2 = f"{str(tmp_path)}/subdir2/*filetmp*$" config = [ { "files": { diff --git a/tests/pytests/unit/beacons/test_watchdog.py b/tests/pytests/unit/beacons/test_watchdog.py 
index df150a028ef..de622b5ea0d 100644 --- a/tests/pytests/unit/beacons/test_watchdog.py +++ b/tests/pytests/unit/beacons/test_watchdog.py @@ -45,6 +45,14 @@ def configure_loader_modules(): return {watchdog: {}} +@pytest.fixture(autouse=True) +def _close_watchdog(configure_loader_modules): + try: + yield + finally: + watchdog.close({}) + + def assertValid(config): ret = watchdog.validate(config) assert ret == (True, "Valid beacon configuration") From cc834784be6d8197af0feaec6972e3deb82d01ac Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 23 Aug 2023 15:41:08 +0100 Subject: [PATCH 73/73] Close request server after testing it --- tests/pytests/unit/transport/test_zeromq.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index e8304eede00..0380457903e 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -25,5 +25,7 @@ async def test_req_server_garbage_request(io_loop): ret = await request_server.handle_message(None, badbyts) except Exception as exc: # pylint: disable=broad-except pytest.fail(f"Exception was raised {exc}") + finally: + request_server.close() assert ret == {"msg": "bad load"}