Merge 3006.x into master

This commit is contained in:
Pedro Algarvio 2023-05-06 08:37:41 +01:00
commit dfa273bc5e
No known key found for this signature in database
GPG key ID: BB36BF6584A298FF
126 changed files with 6509 additions and 2436 deletions

View file

@@ -4,7 +4,7 @@ description: Setup actionlint
inputs:
version:
description: The version of actionlint
default: 1.6.23
default: 1.6.24
cache-seed:
required: true
type: string

View file

@@ -22,7 +22,7 @@ inputs:
required: false
type: string
description: The version of relenv to use
default: 0.10.2
default: 0.12.3
outputs:
version:

View file

@@ -22,9 +22,9 @@ jobs:
arch:
- x86_64
- aarch64
src:
- true
- false
source:
- onedir
- src
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-11
@@ -79,34 +79,31 @@ jobs:
- name: Build Deb
working-directory: pkgs/checkout/
if: ${{ matrix.src != true}}
run: |
tools pkg build deb --onedir salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
- name: Build Deb
working-directory: pkgs/checkout/
if: ${{ matrix.src == true}}
run: |
tools pkg build deb --arch ${{ matrix.arch }}
tools pkg build deb ${{
matrix.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
}}
- name: Cleanup
run: |
rm -rf pkgs/checkout/
- name: Upload DEBs
uses: actions/upload-artifact@v3
if: ${{ matrix.src == false}}
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb
path: ${{ github.workspace }}/pkgs/*
retention-days: 7
if-no-files-found: error
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ matrix.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
fi
- name: Upload DEBs
uses: actions/upload-artifact@v3
if: ${{ matrix.src == true}}
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
path: ${{ github.workspace }}/pkgs/*
retention-days: 7
if-no-files-found: error

View file

@@ -21,7 +21,7 @@ on:
relenv-version:
required: false
type: string
default: 0.10.2
default: 0.12.3
description: The version of relenv to use
python-version-linux:
required: false

View file

@@ -8,6 +8,10 @@ on:
type: string
required: true
description: The Salt version to set prior to building packages.
sign-packages:
type: boolean
default: false
description: Sign Packages
environment:
type: string
description: The GitHub Environment where this workflow should run
@@ -31,12 +35,23 @@ jobs:
shell: bash
id: check-pkg-sign
run: |
if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then
echo "The packages created will NOT be signed"
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
if [ "${{ inputs.sign-packages }}" == "true" ]; then
if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then
MSG="Secrets for signing packages are not available. The packages created will NOT be signed."
echo "${MSG}"
echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
else
MSG="The packages created WILL be signed."
echo "${MSG}"
echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
fi
else
echo "The packages created WILL be signed"
echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
MSG="The sign-packages input is false. The packages created will NOT be signed."
echo "${MSG}"
echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
fi
- uses: actions/checkout@v3

View file

@@ -25,9 +25,9 @@ jobs:
arch:
- x86_64
- aarch64
src:
- true
- false
source:
- onedir
- src
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9
@@ -67,29 +67,27 @@ jobs:
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
- name: Build RPM
if: ${{ matrix.src != true}}
run: |
tools pkg build rpm --onedir salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
tools pkg build rpm ${{
matrix.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
}}
- name: Build RPM
if: ${{ matrix.src == true}}
- name: Set Artifact Name
id: set-artifact-name
run: |
tools pkg build rpm --arch ${{ matrix.arch }}
if [ "${{ matrix.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
fi
- name: Upload RPMs
uses: actions/upload-artifact@v3
if: ${{ matrix.src != true}}
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm
path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm
retention-days: 7
if-no-files-found: error
- name: Upload RPMs
uses: actions/upload-artifact@v3
if: ${{ matrix.src == true}}
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm
retention-days: 7
if-no-files-found: error

View file

@@ -21,7 +21,7 @@ on:
relenv-version:
required: false
type: string
default: 0.10.2
default: 0.12.3
description: The version of relenv to use
python-version-linux:
required: false

View file

@@ -8,6 +8,10 @@ on:
type: string
required: true
description: The Salt version to set prior to building packages
sign-packages:
type: boolean
default: false
description: Sign Packages
environment:
type: string
description: The GitHub Environment where this workflow should run
@@ -40,12 +44,23 @@ jobs:
shell: bash
id: check-pkg-sign
run: |
if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then
echo "The packages created will NOT be signed"
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
if [ "${{ inputs.sign-packages }}" == "true" ]; then
if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then
MSG="Secrets for signing packages are not available. The packages created will NOT be signed."
echo "${MSG}"
echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
else
MSG="The packages created WILL be signed."
echo "${MSG}"
echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
fi
else
echo "The packages created WILL be signed"
echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
MSG="The sign-packages input is false. The packages created will NOT be signed."
echo "${MSG}"
echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
fi
- uses: actions/checkout@v3

View file

@@ -37,6 +37,8 @@ jobs:
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@@ -204,6 +206,18 @@ jobs:
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Define Testrun
id: define-testrun
run: |
@@ -434,7 +448,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"
@@ -452,7 +466,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"

View file

@@ -6,7 +6,16 @@ name: Nightly
run-name: "Nightly (branch: ${{ github.ref_name }})"
on:
workflow_dispatch: {}
workflow_dispatch:
inputs:
skip-salt-test-suite:
type: boolean
default: false
description: Skip running the Salt test suite.
skip-salt-pkg-test-suite:
type: boolean
default: false
description: Skip running the Salt packages test suite.
schedule:
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
- cron: '0 1 * * *' # Every day at 1AM
@@ -81,6 +90,8 @@ jobs:
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@@ -242,12 +253,24 @@ jobs:
- name: Define Jobs
id: define-jobs
run: |
tools ci define-jobs ${{ github.event_name }} changed-files.json
tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json
- name: Check Defined Jobs
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Define Testrun
id: define-testrun
run: |
@@ -483,7 +506,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"
@@ -501,7 +524,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"
@@ -536,6 +559,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: nightly
sign-packages: false
secrets: inherit
build-macos-pkgs:
@@ -548,6 +572,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: nightly
sign-packages: true
secrets: inherit
amazonlinux-2-pkg-tests:
@@ -1409,18 +1434,23 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build \
tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Copy Files For Source Only Artifact Uploads
run: |
mkdir artifacts/src
find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
- name: Upload Standalone Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-src-repo
path: |
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-*
artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
artifacts/src/*-GPG-*
retention-days: 7
if-no-files-found: error
@@ -1530,7 +1560,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \
tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1674,7 +1704,7 @@ jobs:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \
tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1769,7 +1799,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build \
tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1845,7 +1875,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build \
tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1957,7 +1987,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build \
tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1972,6 +2002,7 @@ jobs:
publish-repositories:
name: Publish Repositories
if: ${{ always() && ! failure() && ! cancelled() }}
runs-on:
- self-hosted
- linux

View file

@@ -44,25 +44,20 @@ jobs:
require: admin
username: ${{ github.triggering_actor }}
- name: Check Branch
run: |
echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then
echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
echo "Allowed branches: master, 3006.x"
exit 1
else
echo "Allowed to release from branch ${{ github.ref_name }}"
fi
prepare-workflow:
name: Prepare Workflow Run
runs-on: ubuntu-latest
runs-on:
- self-hosted
- linux
- repo-release
environment: release
needs:
- check-requirements
outputs:
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@@ -93,11 +88,77 @@ jobs:
run: |
tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
- name: Check Release Staged
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Set Cache Seed Output
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"
download-onedir-artifact:
name: Download Staging Onedir Artifact
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
runs-on:
- self-hosted
- linux
- repo-release
environment: release
needs:
- prepare-workflow
strategy:
fail-fast: false
matrix:
include:
- platform: linux
arch: x86_64
- platform: linux
arch: aarch64
- platform: windows
arch: amd64
- platform: windows
arch: x86
- platform: darwin
arch: x86_64
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Download Onedir Tarball Artifact
run: |
tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }}
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
backup:
name: Backup
runs-on:
@@ -107,6 +168,8 @@ jobs:
needs:
- prepare-workflow
environment: release
outputs:
backup-complete: ${{ steps.backup.outputs.backup-complete }}
steps:
- name: Clone The Salt Repository
@@ -121,6 +184,7 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
- name: Backup Previous Releases
id: backup
run: |
tools pkg repo backup-previous-releases
@@ -133,6 +197,7 @@ jobs:
needs:
- prepare-workflow
- backup
- download-onedir-artifact
environment: release
steps:
@@ -155,8 +220,581 @@ jobs:
run: |
tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
almalinux-8-pkg-download-tests:
name: Test Alma Linux 8 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-8
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
almalinux-8-arm64-pkg-download-tests:
name: Test Alma Linux 8 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-8-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
almalinux-9-pkg-download-tests:
name: Test Alma Linux 9 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-9
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
almalinux-9-arm64-pkg-download-tests:
name: Test Alma Linux 9 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-9-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
amazonlinux-2-pkg-download-tests:
name: Test Amazon Linux 2 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: amazonlinux-2
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
amazonlinux-2-arm64-pkg-download-tests:
name: Test Amazon Linux 2 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: amazonlinux-2-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centos-7-pkg-download-tests:
name: Test CentOS 7 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centos-7
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centos-7-arm64-pkg-download-tests:
name: Test CentOS 7 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centos-7-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-8-pkg-download-tests:
name: Test CentOS Stream 8 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-8
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-8-arm64-pkg-download-tests:
name: Test CentOS Stream 8 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-8-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-9-pkg-download-tests:
name: Test CentOS Stream 9 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-9
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-9-arm64-pkg-download-tests:
name: Test CentOS Stream 9 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-9-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
debian-10-pkg-download-tests:
name: Test Debian 10 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: debian-10
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
debian-11-pkg-download-tests:
name: Test Debian 11 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: debian-11
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
debian-11-arm64-pkg-download-tests:
name: Test Debian 11 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: debian-11-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-36-pkg-download-tests:
name: Test Fedora 36 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-36-arm64-pkg-download-tests:
name: Test Fedora 36 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-37-pkg-download-tests:
name: Test Fedora 37 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-37
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-37-arm64-pkg-download-tests:
name: Test Fedora 37 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-37-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-38-pkg-download-tests:
name: Test Fedora 38 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-38
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-38-arm64-pkg-download-tests:
name: Test Fedora 38 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-38-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
photonos-3-pkg-download-tests:
name: Test Photon OS 3 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: photonos-3
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
photonos-4-pkg-download-tests:
name: Test Photon OS 4 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: photonos-4
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2004-pkg-download-tests:
name: Test Ubuntu 20.04 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-20.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2004-arm64-pkg-download-tests:
name: Test Ubuntu 20.04 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-20.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2204-pkg-download-tests:
name: Test Ubuntu 22.04 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-22.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2204-arm64-pkg-download-tests:
name: Test Ubuntu 22.04 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-22.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
macos-12-pkg-download-tests:
name: Test macOS 12 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-macos.yml
with:
distro-slug: macos-12
platform: darwin
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
windows-2022-nsis-amd64-pkg-download-tests:
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
name: Test Windows 2022 amd64 NSIS Package Downloads
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-2022
platform: windows
arch: amd64
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
windows-2022-msi-amd64-pkg-download-tests:
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
name: Test Windows 2022 amd64 MSI Package Downloads
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-2022
platform: windows
arch: amd64
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
release:
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
if: ${{ always() && ! failure() && ! cancelled() }}
runs-on:
- self-hosted
- linux
@ -165,6 +803,36 @@ jobs:
- prepare-workflow
- backup
- publish-repositories
- almalinux-8-pkg-download-tests
- almalinux-8-arm64-pkg-download-tests
- almalinux-9-pkg-download-tests
- almalinux-9-arm64-pkg-download-tests
- amazonlinux-2-pkg-download-tests
- amazonlinux-2-arm64-pkg-download-tests
- centos-7-pkg-download-tests
- centos-7-arm64-pkg-download-tests
- centosstream-8-pkg-download-tests
- centosstream-8-arm64-pkg-download-tests
- centosstream-9-pkg-download-tests
- centosstream-9-arm64-pkg-download-tests
- debian-10-pkg-download-tests
- debian-11-pkg-download-tests
- debian-11-arm64-pkg-download-tests
- fedora-36-pkg-download-tests
- fedora-36-arm64-pkg-download-tests
- fedora-37-pkg-download-tests
- fedora-37-arm64-pkg-download-tests
- fedora-38-pkg-download-tests
- fedora-38-arm64-pkg-download-tests
- photonos-3-pkg-download-tests
- photonos-4-pkg-download-tests
- ubuntu-2004-pkg-download-tests
- ubuntu-2004-arm64-pkg-download-tests
- ubuntu-2204-pkg-download-tests
- ubuntu-2204-arm64-pkg-download-tests
- macos-12-pkg-download-tests
- windows-2022-nsis-amd64-pkg-download-tests
- windows-2022-msi-amd64-pkg-download-tests
environment: release
steps:
- name: Clone The Salt Repository
@ -261,45 +929,12 @@ jobs:
retention-days: 7
if-no-files-found: error
restore:
name: Restore Release Bucket From Backup
if: ${{ always() }}
runs-on:
- self-hosted
- linux
- repo-release
needs:
- release
environment: release
steps:
- name: Clone The Salt Repository
if: ${{ failure() || cancelled() }}
uses: actions/checkout@v3
with:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
- name: Setup Rclone
if: ${{ failure() || cancelled() }}
uses: AnimMouse/setup-rclone@v1
with:
version: v1.61.1
- name: Setup Python Tools Scripts
if: ${{ failure() || cancelled() }}
uses: ./.github/actions/setup-python-tools-scripts
- name: Restore Release Bucket
if: ${{ failure() || cancelled() }}
run: |
tools pkg repo restore-previous-releases
publish-pypi:
name: Publish to PyPi
if: ${{ github.event.repository.fork != true }}
if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }}
needs:
- prepare-workflow
- release
- restore
environment: release
runs-on:
- self-hosted
@ -358,13 +993,21 @@ jobs:
- prepare-workflow
- publish-repositories
- release
- restore
- publish-pypi
steps:
- name: Get workflow information
id: get-workflow-info
uses: technote-space/workflow-conclusion-action@v3
- run: |
# shellcheck disable=SC2129
if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}"
echo '```' >> "${GITHUB_STEP_SUMMARY}"
echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}"
echo '```' >> "${GITHUB_STEP_SUMMARY}"
fi
- name: Set Pipeline Exit Status
shell: bash
run: |

View file

@ -80,6 +80,8 @@ jobs:
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@ -247,6 +249,18 @@ jobs:
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Define Testrun
id: define-testrun
run: |
@ -477,7 +491,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"
@ -495,7 +509,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"

View file

@ -14,6 +14,10 @@ on:
description: >
The Salt version to set prior to building packages and staging the release.
(DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0).
sign-windows-packages:
type: boolean
default: false
description: Sign Windows Packages
skip-salt-test-suite:
type: boolean
default: false
@ -53,17 +57,6 @@ jobs:
require: admin
username: ${{ github.triggering_actor }}
- name: Check Branch
run: |
echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then
echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
echo "Allowed branches: master, 3006.x"
exit 1
else
echo "Allowed to release from branch ${{ github.ref_name }}"
fi
prepare-workflow:
name: Prepare Workflow Run
runs-on: ubuntu-latest
@ -77,6 +70,8 @@ jobs:
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@ -250,6 +245,18 @@ jobs:
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Define Testrun
id: define-testrun
run: |
@ -484,7 +491,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"
@ -502,7 +509,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.11.2"
relenv-version: "0.12.3"
python-version-linux: "3.10.11"
python-version-macos: "3.10.11"
python-version-windows: "3.10.11"
@ -537,6 +544,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
sign-packages: ${{ inputs.sign-windows-packages }}
secrets: inherit
build-macos-pkgs:
@ -549,6 +557,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
sign-packages: true
secrets: inherit
amazonlinux-2-pkg-tests:
@ -1414,14 +1423,19 @@ jobs:
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Copy Files For Source Only Artifact Uploads
run: |
mkdir artifacts/src
find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
- name: Upload Standalone Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo
path: |
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-*
artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
artifacts/src/*-GPG-*
retention-days: 7
if-no-files-found: error
@ -1975,6 +1989,7 @@ jobs:
publish-repositories:
name: Publish Repositories
if: ${{ always() && ! failure() && ! cancelled() }}
runs-on:
- self-hosted
- linux
@ -2070,6 +2085,10 @@ jobs:
name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub
path: artifacts/release
- name: Show Release Artifacts
run: |
tree -a artifacts/release
- name: Upload Release Artifacts
run: |
tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
@ -2084,24 +2103,494 @@ jobs:
retention-days: 7
if-no-files-found: error
test-linux-pkg-downloads:
name: Test Linux Package Downloads
almalinux-8-pkg-download-tests:
name: Test Alma Linux 8 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-latest
distro-slug: almalinux-8
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
test-macos-pkg-downloads:
name: Test macOS Package Downloads
almalinux-8-arm64-pkg-download-tests:
name: Test Alma Linux 8 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-8-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
almalinux-9-pkg-download-tests:
name: Test Alma Linux 9 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-9
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
almalinux-9-arm64-pkg-download-tests:
name: Test Alma Linux 9 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: almalinux-9-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
amazonlinux-2-pkg-download-tests:
name: Test Amazon Linux 2 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: amazonlinux-2
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
amazonlinux-2-arm64-pkg-download-tests:
name: Test Amazon Linux 2 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: amazonlinux-2-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centos-7-pkg-download-tests:
name: Test CentOS 7 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centos-7
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centos-7-arm64-pkg-download-tests:
name: Test CentOS 7 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centos-7-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-8-pkg-download-tests:
name: Test CentOS Stream 8 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-8
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-8-arm64-pkg-download-tests:
name: Test CentOS Stream 8 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-8-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-9-pkg-download-tests:
name: Test CentOS Stream 9 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-9
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
centosstream-9-arm64-pkg-download-tests:
name: Test CentOS Stream 9 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: centosstream-9-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
debian-10-pkg-download-tests:
name: Test Debian 10 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: debian-10
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
debian-11-pkg-download-tests:
name: Test Debian 11 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: debian-11
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
debian-11-arm64-pkg-download-tests:
name: Test Debian 11 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: debian-11-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-36-pkg-download-tests:
name: Test Fedora 36 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-36-arm64-pkg-download-tests:
name: Test Fedora 36 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-37-pkg-download-tests:
name: Test Fedora 37 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-37
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-37-arm64-pkg-download-tests:
name: Test Fedora 37 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-37-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-38-pkg-download-tests:
name: Test Fedora 38 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-38
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
fedora-38-arm64-pkg-download-tests:
name: Test Fedora 38 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-38-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
photonos-3-pkg-download-tests:
name: Test Photon OS 3 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: photonos-3
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
photonos-4-pkg-download-tests:
name: Test Photon OS 4 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: photonos-4
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2004-pkg-download-tests:
name: Test Ubuntu 20.04 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-20.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2004-arm64-pkg-download-tests:
name: Test Ubuntu 20.04 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-20.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2204-pkg-download-tests:
name: Test Ubuntu 22.04 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-22.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
ubuntu-2204-arm64-pkg-download-tests:
name: Test Ubuntu 22.04 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-22.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
macos-12-pkg-download-tests:
name: Test macOS 12 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
@@ -2110,15 +2599,17 @@ jobs:
with:
distro-slug: macos-12
platform: darwin
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
test-windows-pkg-downloads:
windows-2022-nsis-amd64-pkg-download-tests:
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
name: Test Windows Package Downloads
name: Test Windows 2022 amd64 NSIS Package Downloads
needs:
- prepare-workflow
- publish-repositories
@@ -2126,10 +2617,32 @@ jobs:
with:
distro-slug: windows-2022
platform: windows
arch: amd64
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
# Staging job: package-download tests for the Windows 2022 MSI package
# (amd64), via the reusable test-package-downloads-action-windows.yml
# workflow. A sibling job covers the NSIS package type.
windows-2022-msi-amd64-pkg-download-tests:
# Gated on prepare-workflow having selected the pkg-download test set AND
# self-hosted runners being available.
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
name: Test Windows 2022 amd64 MSI Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-2022
platform: windows
arch: amd64
# Selects which installer format to exercise (MSI here, NSIS in the
# sibling job).
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
publish-pypi:
@@ -2182,9 +2695,36 @@ jobs:
- windows-2019-msi-pkg-tests
- windows-2022-nsis-pkg-tests
- windows-2022-msi-pkg-tests
- test-linux-pkg-downloads
- test-macos-pkg-downloads
- test-windows-pkg-downloads
- almalinux-8-pkg-download-tests
- almalinux-8-arm64-pkg-download-tests
- almalinux-9-pkg-download-tests
- almalinux-9-arm64-pkg-download-tests
- amazonlinux-2-pkg-download-tests
- amazonlinux-2-arm64-pkg-download-tests
- centos-7-pkg-download-tests
- centos-7-arm64-pkg-download-tests
- centosstream-8-pkg-download-tests
- centosstream-8-arm64-pkg-download-tests
- centosstream-9-pkg-download-tests
- centosstream-9-arm64-pkg-download-tests
- debian-10-pkg-download-tests
- debian-11-pkg-download-tests
- debian-11-arm64-pkg-download-tests
- fedora-36-pkg-download-tests
- fedora-36-arm64-pkg-download-tests
- fedora-37-pkg-download-tests
- fedora-37-arm64-pkg-download-tests
- fedora-38-pkg-download-tests
- fedora-38-arm64-pkg-download-tests
- photonos-3-pkg-download-tests
- photonos-4-pkg-download-tests
- ubuntu-2004-pkg-download-tests
- ubuntu-2004-arm64-pkg-download-tests
- ubuntu-2204-pkg-download-tests
- ubuntu-2204-arm64-pkg-download-tests
- macos-12-pkg-download-tests
- windows-2022-nsis-amd64-pkg-download-tests
- windows-2022-msi-amd64-pkg-download-tests
environment: staging
runs-on:
- self-hosted

View file

@ -76,7 +76,7 @@
- name: Create Repository
run: |
tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -52,7 +52,7 @@
- name: Create Repository
run: |
tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -88,7 +88,7 @@
- name: Create Repository
run: |
tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -18,6 +18,11 @@
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
<%- if pkg_type in ("macos", "windows") and gh_environment %>
environment: <{ gh_environment }>
<%- if pkg_type == "macos" %>
sign-packages: true
<%- else %>
sign-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>
<%- endif %>
secrets: inherit
<%- endif %>

View file

@ -93,7 +93,7 @@
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -52,18 +52,23 @@
- name: Create Repository
run: |
tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Copy Files For Source Only Artifact Uploads
run: |
mkdir artifacts/src
find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
- name: Upload Standalone Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo
path: |
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-*
artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
artifacts/src/*-GPG-*
retention-days: 7
if-no-files-found: error

View file

@ -70,7 +70,7 @@
- name: Create Repository
run: |
tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -9,7 +9,6 @@
<%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %>
<%- set gpg_key_id = "64CBBC8173D76B3F" %>
<%- set prepare_actual_release = prepare_actual_release | default(False) %>
<%- set release_branches = ["master", "3006.x"] %>
---
<%- block name %>
name: <{ workflow_name }>
@@ -90,6 +89,8 @@ jobs:
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@ -267,6 +268,18 @@ jobs:
run: |
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Define Testrun
id: define-testrun
run: |
@ -324,6 +337,9 @@ jobs:
id: get-workflow-info
uses: technote-space/workflow-conclusion-action@v3
<%- block set_pipeline_exit_status_extra_steps %>
<%- endblock set_pipeline_exit_status_extra_steps %>
- name: Set Pipeline Exit Status
shell: bash
run: |

View file

@ -1,6 +1,8 @@
<%- set gh_environment = gh_environment|default("nightly") %>
<%- set skip_test_coverage_check = skip_test_coverage_check|default("false") %>
<%- set skip_junit_reports_check = skip_junit_reports_check|default("false") %>
<%- set prepare_workflow_skip_test_suite = "${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}" %>
<%- set prepare_workflow_skip_pkg_test_suite = "${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}" %>
<%- set prepare_workflow_if_check = prepare_workflow_if_check|default("${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}") %>
<%- extends 'ci.yml.jinja' %>
@@ -14,7 +16,16 @@ run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }})"
<%- block on %>
on:
workflow_dispatch: {}
workflow_dispatch:
inputs:
skip-salt-test-suite:
type: boolean
default: false
description: Skip running the Salt test suite.
skip-salt-pkg-test-suite:
type: boolean
default: false
description: Skip running the Salt packages test suite.
schedule:
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
- cron: '0 1 * * *' # Every day at 1AM
@ -53,6 +64,7 @@ concurrency:
publish-repositories:
<%- do conclusion_needs.append('publish-repositories') %>
name: Publish Repositories
if: ${{ always() && ! failure() && ! cancelled() }}
runs-on:
- self-hosted
- linux

View file

@ -61,17 +61,6 @@ permissions:
require: admin
username: ${{ github.triggering_actor }}
- name: Check Branch
run: |
echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then
echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
echo "Allowed branches: <{ release_branches|join(', ') }>"
exit 1
else
echo "Allowed to release from branch ${{ github.ref_name }}"
fi
<%- endblock pre_jobs %>
@ -81,7 +70,11 @@ permissions:
prepare-workflow:
name: Prepare Workflow Run
runs-on: ubuntu-latest
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
environment: <{ gh_environment }>
<%- if prepare_workflow_needs %>
needs:
<%- for need in prepare_workflow_needs.iter(consume=False) %>
@ -91,6 +84,8 @@ permissions:
outputs:
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
steps:
- uses: actions/checkout@v3
with:
@ -121,6 +116,24 @@ permissions:
run: |
tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
- name: Check Release Staged
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
- name: Get Salt Releases
id: get-salt-releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-releases
- name: Check Salt Releases
run: |
echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
- name: Set Cache Seed Output
id: set-cache-seed
run: |
@@ -132,6 +145,54 @@ permissions:
<%- block jobs %>
<{- super() }>
# Jinja-templated job (release.yml.jinja): fetches the previously-staged
# onedir tarballs for each platform/arch and re-uploads them as artifacts of
# THIS workflow run so downstream jobs can consume them.
# <{ gh_environment }> placeholders are expanded at workflow-generation time.
download-onedir-artifact:
name: Download Staging Onedir Artifact
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
environment: <{ gh_environment }>
needs:
- prepare-workflow
strategy:
# Keep downloading the other platform/arch combinations even if one fails.
fail-fast: false
matrix:
include:
- platform: linux
arch: x86_64
- platform: linux
arch: aarch64
- platform: windows
arch: amd64
- platform: windows
arch: x86
- platform: darwin
arch: x86_64
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
# Queries the AWS EC2 instance metadata service (IMDSv2: PUT a session
# token, then read the spb:environment instance tag) and exports it for
# later steps.
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
# NOTE(review): uses inputs.salt-version (a workflow_dispatch input) rather
# than the prepare-workflow output used elsewhere — confirm both always agree.
- name: Download Onedir Tarball Artifact
run: |
tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }}
# Re-publish the downloaded tarball (plus any companion files matched by the
# trailing *) as an artifact of this run.
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
backup:
name: Backup
runs-on:
@ -141,6 +202,8 @@ permissions:
needs:
- prepare-workflow
environment: <{ gh_environment }>
outputs:
backup-complete: ${{ steps.backup.outputs.backup-complete }}
steps:
- name: Clone The Salt Repository
@ -155,6 +218,7 @@ permissions:
uses: ./.github/actions/setup-python-tools-scripts
- name: Backup Previous Releases
id: backup
run: |
tools pkg repo backup-previous-releases
@ -168,6 +232,7 @@ permissions:
needs:
- prepare-workflow
- backup
- download-onedir-artifact
environment: <{ gh_environment }>
steps:
@ -197,6 +262,7 @@ permissions:
release:
<%- do conclusion_needs.append('release') %>
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
if: ${{ always() && ! failure() && ! cancelled() }}
runs-on:
- self-hosted
- linux
@ -304,15 +370,17 @@ permissions:
retention-days: 7
if-no-files-found: error
{#- Disable automatic backup restore
restore:
<%- do conclusion_needs.append('restore') %>
name: Restore Release Bucket From Backup
if: ${{ always() }}
if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }}
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
needs:
- backup
- release
<%- for need in test_repo_needs.iter(consume=True) %>
- <{ need }>
@ -320,34 +388,31 @@ permissions:
environment: <{ gh_environment }>
steps:
- name: Clone The Salt Repository
if: ${{ failure() || cancelled() }}
uses: actions/checkout@v3
with:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
- name: Setup Rclone
if: ${{ failure() || cancelled() }}
uses: AnimMouse/setup-rclone@v1
with:
version: v1.61.1
- name: Setup Python Tools Scripts
if: ${{ failure() || cancelled() }}
uses: ./.github/actions/setup-python-tools-scripts
- name: Restore Release Bucket
if: ${{ failure() || cancelled() }}
run: |
tools pkg repo restore-previous-releases
#}
publish-pypi:
<%- do conclusion_needs.append('publish-pypi') %>
name: Publish to PyPi
if: ${{ github.event.repository.fork != true }}
if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }}
needs:
- prepare-workflow
- release
- restore
- release {#- Disable automatic backup restore
- restore #}
environment: <{ gh_environment }>
runs-on:
- self-hosted
@ -396,3 +461,15 @@ permissions:
tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
<%- endblock jobs %>
<%- block set_pipeline_exit_status_extra_steps %>
# When the overall workflow conclusion (from the get-workflow-info step) is
# not "success", append manual release-bucket restore instructions to the
# job's step summary.
- run: |
# shellcheck disable=SC2129
if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}"
echo '```' >> "${GITHUB_STEP_SUMMARY}"
echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}"
echo '```' >> "${GITHUB_STEP_SUMMARY}"
fi
<%- endblock set_pipeline_exit_status_extra_steps %>

View file

@ -27,6 +27,10 @@ on:
description: >
The Salt version to set prior to building packages and staging the release.
(DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0).
sign-windows-packages:
type: boolean
default: false
description: Sign Windows Packages
skip-salt-test-suite:
type: boolean
default: false
@ -67,17 +71,6 @@ concurrency:
require: admin
username: ${{ github.triggering_actor }}
- name: Check Branch
run: |
echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then
echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
echo "Allowed branches: <{ release_branches|join(', ') }>"
exit 1
else
echo "Allowed to release from branch ${{ github.ref_name }}"
fi
<%- endblock pre_jobs %>
@ -132,6 +125,10 @@ concurrency:
name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub
path: artifacts/release
- name: Show Release Artifacts
run: |
tree -a artifacts/release
{#-
- name: Download Release Documentation (PDF)

View file

@ -1,8 +1,41 @@
<%- set job_name = "test-linux-pkg-downloads" %>
<%- set linux_pkg_tests = (
("almalinux-8", "Alma Linux 8", "x86_64"),
("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64"),
("almalinux-9", "Alma Linux 9", "x86_64"),
("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"),
("amazonlinux-2", "Amazon Linux 2", "x86_64"),
("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"),
("centos-7", "CentOS 7", "x86_64"),
("centos-7-arm64", "CentOS 7 Arm64", "aarch64"),
("centosstream-8", "CentOS Stream 8", "x86_64"),
("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64"),
("centosstream-9", "CentOS Stream 9", "x86_64"),
("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64"),
("debian-10", "Debian 10", "x86_64"),
("debian-11", "Debian 11", "x86_64"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64"),
("fedora-36", "Fedora 36", "x86_64"),
("fedora-36-arm64", "Fedora 36 Arm64", "aarch64"),
("fedora-37", "Fedora 37", "x86_64"),
("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"),
("fedora-38", "Fedora 38", "x86_64"),
("fedora-38-arm64", "Fedora 38 Arm64", "aarch64"),
("photonos-3", "Photon OS 3", "x86_64"),
("photonos-4", "Photon OS 4", "x86_64"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64"),
("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"),
("ubuntu-22.04", "Ubuntu 22.04", "x86_64"),
("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64")
) %>
<%- for slug, display_name, arch in linux_pkg_tests %>
<%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
name: Test Linux Package Downloads
name: Test <{ display_name }> Package Downloads
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
@@ -11,25 +44,32 @@
needs:
- prepare-workflow
- publish-repositories
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-latest
distro-slug: <{ slug }>
platform: linux
arch: <{ arch }>
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
<%- if gh_environment == "release" %>
artifacts-from-workflow: staging.yml
<%- endif %>
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
<%- endfor %>
<%- set job_name = "test-macos-pkg-downloads" %>
<%- for slug, display_name, arch in (
("macos-12", "macOS 12", "x86_64"),
) %>
<%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
name: Test macOS Package Downloads
name: Test <{ display_name }> Package Downloads
<%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
@ -38,21 +78,28 @@
needs:
- prepare-workflow
- publish-repositories
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action-macos.yml
with:
distro-slug: macos-12
distro-slug: <{ slug }>
platform: darwin
arch: <{ arch }>
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
<%- if gh_environment == "release" %>
artifacts-from-workflow: staging.yml
<%- endif %>
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
<%- endfor %>
<%- set job_name = "test-windows-pkg-downloads" %>
<%- for slug, display_name, arch in (
("windows-2022", "Windows 2022", "amd64"),
) %>
<%- for pkg_type in ("NSIS", "MSI") %>
<%- set job_name = "{}-{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower(), arch.lower()) %>
<{ job_name }>:
<%- do test_repo_needs.append(job_name) %>
@ -61,19 +108,24 @@
<%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %>
name: Test Windows Package Downloads
name: Test <{ display_name }> <{ arch }> <{ pkg_type }> Package Downloads
needs:
- prepare-workflow
- publish-repositories
<%- if gh_environment == "release" %>
- download-onedir-artifact
<%- endif %>
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-2022
distro-slug: <{ slug }>
platform: windows
arch: <{ arch }>
pkg-type: <{ pkg_type }>
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
<%- if gh_environment == "release" %>
artifacts-from-workflow: staging.yml
<%- endif %>
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
<%- endfor %>
<%- endfor %>

View file

@ -227,6 +227,8 @@ jobs:
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
@ -515,10 +517,10 @@ jobs:
run: |
tree -a artifacts
- name: Set up Python 3.9
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |

View file

@ -248,6 +248,8 @@ jobs:
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: PyPi Proxy
run: |
@ -490,11 +492,6 @@ jobs:
run: |
tree -a artifacts
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: "3.9"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'

View file

@ -11,6 +11,10 @@ on:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
salt-version:
type: string
required: true
@ -23,11 +27,10 @@ on:
required: true
type: string
description: The environment to run tests against
python-version:
required: false
latest-release:
required: true
type: string
description: The python version to run tests with
default: "3.10"
description: The latest salt release
package-name:
required: false
type: string
@ -43,13 +46,6 @@ on:
type: string
description: The nox session to run
default: test-pkgs-onedir
artifacts-from-workflow:
required: false
type: string
description: >
Which workflow to download artifacts from. An empty string means the
current workflow run.
default: ""
env:
@ -62,37 +58,13 @@ env:
jobs:
generate-matrix:
name: Generate Package Test Matrix
runs-on: ubuntu-latest
outputs:
arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Generate Package Test Matrix
id: generate-pkg-matrix
run: |
tools ci pkg-download-matrix linux
dependencies:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
- bastion
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
@@ -102,24 +74,12 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Download Onedir Tarball as an Artifact
if: inputs.artifacts-from-workflow == ''
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Onedir Tarball as an Artifact(from a different workflow)
if: inputs.artifacts-from-workflow != ''
uses: dawidd6/action-download-artifact@v2
with:
workflow: ${{ inputs.artifacts-from-workflow }}
workflow_conclusion: ""
branch: ${{ github.event.ref }}
if_no_artifact_found: fail
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
@ -128,43 +88,64 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Install Nox
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
run: |
nox --force-color --install-only -e ${{ inputs.nox-session }}
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox --force-color -e "pre-archive-cleanup(pkg=False)"
tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
# Re-uploads the onedir tarball as a workflow artifact.
# NOTE(review): this step still references matrix.arch while the surrounding
# hunks in this diff switch the job from a matrix to an inputs.arch input —
# confirm this step was also removed or updated to inputs.arch, otherwise
# matrix.arch expands empty here.
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
@ -172,16 +153,11 @@ jobs:
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- dependencies
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
steps:
- name: Checkout Source Code
@ -190,102 +166,128 @@ jobs:
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }}
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Show System Info & Test Plan
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "1"
PRINT_TEST_PLAN_ONLY: "1"
PRINT_SYSTEM_INFO: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
run: |
nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
- name: Run Package Download Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
SALT_REPO_ARCH: ${{ inputs.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
run: |
nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs
- name: Fix file ownership
- name: Run Package Download Tests
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ inputs.arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
run: |
sudo chown -R "$(id -un)" .
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Prepare Test Run Artifacts
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Upload Test Run Artifacts
if: always() && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }})
runs-on: ubuntu-latest
name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
runs-on:
- self-hosted
- linux
- x86_64
environment: ${{ inputs.environment }}
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- test
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
steps:
- name: Checkout Source Code
@ -295,7 +297,7 @@ jobs:
id: download-test-run-artifacts
uses: actions/download-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: artifacts
- name: Show Test Run Artifacts
@ -303,12 +305,6 @@ jobs:
run: |
tree -a artifacts
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
update-environment: true
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
@ -318,6 +314,6 @@ jobs:
# always run even if the previous steps fails
if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
with:
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }})
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -11,6 +11,10 @@ on:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
salt-version:
type: string
required: true
@ -23,6 +27,10 @@ on:
required: true
type: string
description: The environment to run tests against
latest-release:
required: true
type: string
description: The latest salt release
python-version:
required: false
type: string
@ -43,13 +51,6 @@ on:
type: string
description: The nox session to run
default: test-pkgs-onedir
artifacts-from-workflow:
required: false
type: string
description: >
Which workflow to download artifacts from. An empty string means the
current workflow run.
default: ""
env:
@ -62,34 +63,10 @@ env:
jobs:
generate-matrix:
name: Generate Package Test Matrix
runs-on: ubuntu-latest
outputs:
arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Generate Package Test Matrix
id: generate-pkg-matrix
run: |
tools ci pkg-download-matrix macos
dependencies:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on: ${{ inputs.distro-slug }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
@ -99,24 +76,12 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Download Onedir Tarball as an Artifact
if: inputs.artifacts-from-workflow == ''
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Onedir Tarball as an Artifact(from a different workflow)
if: inputs.artifacts-from-workflow != ''
uses: dawidd6/action-download-artifact@v2
with:
workflow: ${{ inputs.artifacts-from-workflow }}
workflow_conclusion: ""
branch: ${{ github.event.ref }}
if_no_artifact_found: fail
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
@ -125,7 +90,7 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
@ -166,18 +131,10 @@ jobs:
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
@ -187,11 +144,6 @@ jobs:
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- dependencies
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
steps:
- name: Checkout Source Code
@ -200,8 +152,8 @@ jobs:
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Install System Dependencies
run: |
@ -212,7 +164,7 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
@ -228,7 +180,9 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Decompress .nox Directory
run: |
@ -236,6 +190,7 @@ jobs:
- name: Show System Info & Test Plan
env:
SALT_RELEASE: "${{ inputs.salt-version }}"
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "1"
PRINT_TEST_PLAN_ONLY: "1"
@ -243,6 +198,7 @@ jobs:
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
@ -257,7 +213,8 @@ jobs:
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.arch }}
SALT_REPO_ARCH: ${{ inputs.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
@ -288,24 +245,19 @@ jobs:
if: always() && job.status != 'cancelled'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }})
name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
runs-on: ubuntu-latest
environment: ${{ inputs.environment }}
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- test
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
steps:
- name: Checkout Source Code
@ -315,7 +267,7 @@ jobs:
id: download-test-run-artifacts
uses: actions/download-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: artifacts
- name: Show Test Run Artifacts
@ -337,6 +289,6 @@ jobs:
# always run even if the previous steps fails
if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
with:
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }})
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -11,6 +11,14 @@ on:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
pkg-type:
required: true
type: string
description: The type of package being tested
salt-version:
type: string
required: true
@ -23,11 +31,10 @@ on:
required: true
type: string
description: The environment to run tests against
python-version:
required: false
latest-release:
required: true
type: string
description: The python version to run tests with
default: "3.10"
description: The latest salt release
package-name:
required: false
type: string
@ -48,13 +55,6 @@ on:
type: boolean
description: Skip Publishing JUnit Reports
default: false
artifacts-from-workflow:
required: false
type: string
description: >
Which workflow to download artifacts from. An empty string means the
current workflow run.
default: ""
env:
@ -67,40 +67,13 @@ env:
jobs:
generate-matrix:
name: Generate Package Test Matrix
runs-on:
- self-hosted
- linux
- x86_64
outputs:
arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Generate Package Test Matrix
id: generate-pkg-matrix
run: |
tools ci pkg-download-matrix windows
dependencies:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
@ -110,24 +83,12 @@ jobs:
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Download Onedir Tarball as an Artifact
if: inputs.artifacts-from-workflow == ''
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Onedir Tarball as an Artifact(from a different workflow)
if: inputs.artifacts-from-workflow != ''
uses: dawidd6/action-download-artifact@v2
with:
workflow: ${{ inputs.artifacts-from-workflow }}
workflow_conclusion: ""
branch: ${{ github.event.ref }}
if_no_artifact_found: fail
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
@ -136,7 +97,7 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
@ -190,18 +151,10 @@ jobs:
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
name: nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
@ -213,12 +166,7 @@ jobs:
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- generate-matrix
- dependencies
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
steps:
- name: Checkout Source Code
@ -227,7 +175,7 @@ jobs:
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
@ -235,13 +183,15 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -271,9 +221,10 @@ jobs:
- name: Show System Info & Test Plan
env:
INSTALL_TYPE: ${{ matrix.install_type }}
INSTALL_TYPE: ${{ inputs.pkg-type }}
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.install_arch }}
SALT_REPO_ARCH: ${{ inputs.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
@ -288,9 +239,10 @@ jobs:
- name: Run Package Download Tests
env:
INSTALL_TYPE: ${{ matrix.install_type }}
INSTALL_TYPE: ${{ inputs.pkg-type }}
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.install_arch }}
SALT_REPO_ARCH: ${{ inputs.arch }}
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
@ -331,24 +283,22 @@ jobs:
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: |
artifacts
!artifacts/salt/*
!artifacts/salt-*.tar.*
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }})
runs-on: ubuntu-latest
name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
runs-on:
- self-hosted
- linux
- x86_64
environment: ${{ inputs.environment }}
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- test
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
steps:
- name: Checkout Source Code
@ -358,7 +308,7 @@ jobs:
id: download-test-run-artifacts
uses: actions/download-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
path: artifacts
- name: Show Test Run Artifacts
@ -366,11 +316,6 @@ jobs:
run: |
tree -a artifacts
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
@ -380,6 +325,6 @@ jobs:
# always run even if the previous steps fails
if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
with:
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }})
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }} ${{ inputs.pkg-type }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
annotate_only: true

View file

@ -154,7 +154,7 @@ jobs:
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
@ -214,6 +214,8 @@ jobs:
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Decompress .nox Directory
run: |

View file

@ -166,10 +166,19 @@ jobs:
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
- name: Define Nox Upload Artifact Name
id: nox-artifact-name
run: |
if [ "${{ contains(inputs.distro-slug, 'windows') }}" != "true" ]; then
echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}"
else
echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}"
fi
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
name: ${{ steps.nox-artifact-name.outputs.name }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
@ -219,6 +228,8 @@ jobs:
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
# If we get a cache miss here it means the dependencies step failed to save the cache
fail-on-cache-miss: true
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts

View file

@ -63,7 +63,7 @@ repos:
- id: tools
alias: generate-workflows
name: Generate GitHub Workflow Templates
files: ^(tools/pre_commit\.py|.github/workflows/templates/.*)$
files: ^(cicd/shared-gh-workflows-context\.yml|tools/pre_commit\.py|.github/workflows/templates/.*)$
pass_filenames: false
args:
- pre-commit

View file

@ -7,6 +7,42 @@ Versions are `MAJOR.PATCH`.
# Changelog
## 3006.1 (2023-05-05)
### Fixed
- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
This event is only used when these functions are called via the schedule execution modules.
Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113)
- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
- Fixed issue in salt-cloud so that multiple masters specified in the cloud
are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
## 3006.0 (2023-04-18)

View file

@ -1 +0,0 @@
Ensure NamedLoaderContext's have their value() used if passing to other modules

View file

@ -1 +0,0 @@
add documentation note about reactor state ids.

View file

@ -1 +0,0 @@
Fix dmsetup device names with hyphen being picked up.

View file

@ -1 +0,0 @@
Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments.

View file

@ -1 +0,0 @@
Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package

View file

@ -1 +0,0 @@
Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg`

View file

@ -1 +0,0 @@
lgpo_reg.set_value now returns ``True`` on success instead of ``None``

View file

@ -1 +0,0 @@
Fix cmd.run doesn't output changes in test mode

View file

@ -1,2 +0,0 @@
Fixed issue in salt-cloud so that multiple masters specified in the cloud
are written to the minion config properly

View file

@ -97,7 +97,7 @@
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "cloud-user"
"ssh_username": "centos"
},
"centosstream-8": {
"ami": "ami-055e35dc7180defad",

View file

@ -1,4 +1,4 @@
python_version_linux: "3.10.11"
python_version_macos: "3.10.11"
python_version_windows: "3.10.11"
relenv_version: "0.11.2"
relenv_version: "0.12.3"

View file

@ -1025,6 +1025,7 @@
# If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount
# of time, in seconds, before the cache is considered invalid by a master and a fresh
# pillar is recompiled and stored.
# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires.
#pillar_cache_ttl: 3600
# If and only if a master has set `pillar_cache: True`, one of several storage providers

View file

@ -950,6 +950,7 @@ syndic_user: salt
# If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount
# of time, in seconds, before the cache is considered invalid by a master and a fresh
# pillar is recompiled and stored.
# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires.
#pillar_cache_ttl: 3600
# If and only if a master has set `pillar_cache: True`, one of several storage providers

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-API" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-API" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-api \- salt-api Command
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-CALL" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-CALL" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-call \- salt-call Documentation
.SH SYNOPSIS

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-CLOUD" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-CLOUD" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-cloud \- Salt Cloud Command
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-CP" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-CP" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-cp \- salt-cp Documentation
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-KEY" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-KEY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-key \- salt-key Documentation
.SH SYNOPSIS

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-MASTER" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-MASTER" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-master \- salt-master Documentation
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-MINION" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-MINION" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-minion \- salt-minion Documentation
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-PROXY" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-PROXY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-proxy \- salt-proxy Documentation
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-RUN" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-RUN" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-run \- salt-run Documentation
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-SSH" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-SSH" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-ssh \- salt-ssh Documentation
.SH SYNOPSIS

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-SYNDIC" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT-SYNDIC" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt-syndic \- salt-syndic Documentation
.sp

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt \- salt
.SH SYNOPSIS

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT" "7" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SALT" "7" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
salt \- Salt Documentation
.SH SALT PROJECT
@ -1346,7 +1346,7 @@ You can enable or disable test groups locally by passing their respected flag:
\-\-flaky\-jail \- Test that need to be temporarily skipped.
.UNINDENT
.sp
In Your PR, you can enable or disable test groups by setting a label.
In your PR, you can enable or disable test groups by setting a label.
All fast, slow, and core tests specified in the change file will always run.
.INDENT 0.0
.IP \(bu 2
@ -1380,7 +1380,7 @@ But that advice is backwards for the changelog. We follow the
our changelog, and use towncrier to generate it for each release. As a
contributor, all that means is that you need to add a file to the
\fBsalt/changelog\fP directory, using the \fB<issue #>.<type>\fP format. For
instanch, if you fixed issue 123, you would do:
instance, if you fixed issue 123, you would do:
.INDENT 0.0
.INDENT 3.5
.sp
@ -7377,6 +7377,7 @@ Default: \fB3600\fP
If and only if a master has set \fBpillar_cache: True\fP, the cache TTL controls the amount
of time, in seconds, before the cache is considered invalid by a master and a fresh
pillar is recompiled and stored.
The cache TTL does not prevent pillar cache from being refreshed before its TTL expires.
.SS \fBpillar_cache_backend\fP
.sp
New in version 2015.8.8.
@ -11635,10 +11636,7 @@ Changed in version 2018.3.0: Renamed from \fBenvironment\fP to \fBsaltenv\fP\&.
ignored and \fBsaltenv\fP will be used.
.sp
Normally the minion is not isolated to any single environment on the master
when running states, but the environment can be isolated on the minion side
by statically setting it. Remember that the recommended way to manage
environments is to isolate via the top file.
The default fileserver environment to use when copying files and applying states.
.INDENT 0.0
.INDENT 3.5
.sp
@ -14934,6 +14932,7 @@ For reference, see:
# If and only if a master has set \(ga\(gapillar_cache: True\(ga\(ga, the cache TTL controls the amount
# of time, in seconds, before the cache is considered invalid by a master and a fresh
# pillar is recompiled and stored.
# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires.
#pillar_cache_ttl: 3600
# If and only if a master has set \(gapillar_cache: True\(ga, one of several storage providers
@ -50676,7 +50675,7 @@ You can enable or disable test groups locally by passing their respected flag:
\-\-flaky\-jail
.UNINDENT
.sp
In Your PR you can enable or disable test groups by setting a label.
In your PR you can enable or disable test groups by setting a label.
All fast, slow and core tests specified in the change file will always run.
.INDENT 0.0
.IP \(bu 2
@ -61394,7 +61393,7 @@ Add the following to \fB/srv/reactor/revert.sls\fP:
.ft C
revert\-file:
local.state.apply:
\- tgt: {{ data[\(aqdata\(aq][\(aqid\(aq] }}
\- tgt: {{ data[\(aqid\(aq] }}
\- arg:
\- maintain_important_file
.ft P
@ -61411,13 +61410,6 @@ to modify the watched file, it is important to ensure the state applied is
also \fI\%idempotent\fP\&.
.UNINDENT
.UNINDENT
.sp
\fBNOTE:\fP
.INDENT 0.0
.INDENT 3.5
The expression \fB{{ data[\(aqdata\(aq][\(aqid\(aq] }}\fP \fI\%is correct\fP as it matches the event structure \fI\%shown above\fP\&.
.UNINDENT
.UNINDENT
.SS State SLS
.sp
Create the state sls file referenced by the reactor sls file. This state file
@ -61838,6 +61830,14 @@ in \fI\%local reactions\fP, but as noted above this is not very
user\-friendly. Therefore, the new config schema is recommended if the master
is running a supported release.
.sp
\fBNOTE:\fP
.INDENT 0.0
.INDENT 3.5
State ids of reactors for runners and wheels should all be unique. They can
overwrite each other when added to the async queue causing lost reactions.
.UNINDENT
.UNINDENT
.sp
The below two examples are equivalent:
.TS
center;
@ -61909,6 +61909,14 @@ Like \fI\%runner reactions\fP, the old config schema called for
wheel reactions to have arguments passed directly under the name of the
\fI\%wheel function\fP (or in \fBarg\fP or \fBkwarg\fP parameters).
.sp
\fBNOTE:\fP
.INDENT 0.0
.INDENT 3.5
State ids of reactors for runners and wheels should all be unique. They can
overwrite each other when added to the async queue causing lost reactions.
.UNINDENT
.UNINDENT
.sp
The below two examples are equivalent:
.TS
center;
@ -193929,7 +193937,7 @@ Passes through all the parameters described in the
\fI\%utils.http.query function\fP:
.INDENT 7.0
.TP
.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.0\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs)
.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.1\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs)
Query a resource, and decode the return data
.UNINDENT
.INDENT 7.0
@ -280078,6 +280086,10 @@ process, as grains can affect which modules are available.
.B refresh_pillar
True
Set to \fBFalse\fP to keep pillar data from being refreshed.
.TP
.B clean_pillar_cache
False
Set to \fBTrue\fP to refresh pillar cache.
.UNINDENT
.sp
CLI Examples:
@ -280273,7 +280285,7 @@ salt \(aq*\(aq saltutil.signal_job <job id> 15
.UNINDENT
.INDENT 0.0
.TP
.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None)
.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False)
Changed in version 2015.8.11,2016.3.2: On masterless minions, pillar modules are now synced, and refreshed
when \fBrefresh\fP is set to \fBTrue\fP\&.
@ -280323,6 +280335,10 @@ dictionary of modules to sync based on type
.B extmod_blacklist
None
dictionary of modules to blacklist based on type
.TP
.B clean_pillar_cache
False
Set to \fBTrue\fP to refresh pillar cache.
.UNINDENT
.sp
CLI Examples:
@ -280519,7 +280535,7 @@ salt \(aq*\(aq saltutil.sync_executors saltenv=base,dev
.UNINDENT
.INDENT 0.0
.TP
.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None)
.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False)
New in version 0.10.0.
.sp
@ -280547,6 +280563,10 @@ comma\-separated list of modules to sync
.B extmod_blacklist
None
comma\-separated list of modules to blacklist based on type
.TP
.B clean_pillar_cache
False
Set to \fBTrue\fP to refresh pillar cache.
.UNINDENT
.sp
CLI Examples:
@ -280815,7 +280835,7 @@ salt \(aq*\(aq saltutil.sync_output saltenv=base,dev
.UNINDENT
.INDENT 0.0
.TP
.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None)
.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False)
New in version 2015.8.11,2016.3.2.
.sp
@ -280837,6 +280857,10 @@ comma\-separated list of modules to sync
.B extmod_blacklist
None
comma\-separated list of modules to blacklist based on type
.TP
.B clean_pillar_cache
False
Set to \fBTrue\fP to refresh pillar cache.
.UNINDENT
.sp
\fBNOTE:\fP
@ -325208,6 +325232,14 @@ User
.sp
Default is \fBMachine\fP
.UNINDENT
.TP
.B Raises
.INDENT 7.0
.IP \(bu 2
\fI\%SaltInvocationError\fP \-\- Invalid policy_class
.IP \(bu 2
\fI\%CommandExecutionError\fP \-\- On failure
.UNINDENT
.TP
.B Returns
@ -325216,9 +325248,6 @@ None: Key/value not present
.TP
.B Return type
\fI\%bool\fP
.TP
.B Raises
\fI\%SaltInvocationError\fP \-\- Invalid policy_class
.UNINDENT
.sp
CLI Example:
@ -325264,6 +325293,14 @@ User
.sp
Default is \fBMachine\fP
.UNINDENT
.TP
.B Raises
.INDENT 7.0
.IP \(bu 2
\fI\%SaltInvocationError\fP \-\- Invalid policy_class
.IP \(bu 2
\fI\%CommandExecutionError\fP \-\- On failure
.UNINDENT
.TP
.B Returns
@ -325272,9 +325309,6 @@ None: If already disabled
.TP
.B Return type
\fI\%bool\fP
.TP
.B Raises
\fI\%SaltInvocationError\fP \-\- Invalid policy_class
.UNINDENT
.sp
CLI Example:
@ -325470,12 +325504,6 @@ Default is \fBMachine\fP
.UNINDENT
.TP
.B Returns
\fBTrue\fP if successful, otherwise \fBFalse\fP
.TP
.B Return type
\fI\%bool\fP
.TP
.B Raises
.INDENT 7.0
.IP \(bu 2
@ -325485,6 +325513,12 @@ Default is \fBMachine\fP
.IP \(bu 2
\fI\%SaltInvocationError\fP \-\- v_data doesn\(aqt match v_type
.UNINDENT
.TP
.B Returns
\fBTrue\fP if successful, otherwise \fBFalse\fP
.TP
.B Return type
\fI\%bool\fP
.UNINDENT
.sp
CLI Example:
@ -325533,10 +325567,18 @@ Default is \fBMachine\fP
.UNINDENT
.TP
.B Raises
.INDENT 7.0
.IP \(bu 2
\fI\%SaltInvocationError\fP \-\- Invalid policy class
.IP \(bu 2
\fI\%CommandExecutionError\fP \-\- On failure
.UNINDENT
.TP
.B Returns
None
True if successful
.TP
.B Return type
\fI\%bool\fP
.UNINDENT
.sp
CLI Example:
@ -412564,6 +412606,8 @@ tomdroid\-src\-0.7.3.tar.gz:
.fi
.UNINDENT
.UNINDENT
.sp
source_hash is ignored if the file hosted is not on an HTTP, HTTPS or FTP server.
.UNINDENT
.UNINDENT
.INDENT 7.0
@ -444129,6 +444173,13 @@ If your service states are running into trouble with init system detection,
please see the \fI\%Overriding Virtual Module Providers\fP
section of Salt\(aqs module documentation to work around possible errors.
.sp
For services managed by systemd, the systemd_service module includes a built\-in
feature to reload the daemon when unit files are changed or extended. This
feature is used automatically by the service state and the systemd_service
module when running on a systemd minion, so there is no need to set up your own
methods of reloading the daemon. If you need to manually reload the daemon for
some reason, you can use the \fI\%systemd_service.systemctl_reload\fP function provided by Salt.
.sp
\fBNOTE:\fP
.INDENT 0.0
.INDENT 3.5
@ -466637,7 +466688,7 @@ You can enable or disable test groups locally by passing their respected flag:
\-\-flaky\-jail \- Test that need to be temporarily skipped.
.UNINDENT
.sp
In Your PR, you can enable or disable test groups by setting a label.
In your PR, you can enable or disable test groups by setting a label.
All fast, slow, and core tests specified in the change file will always run.
.INDENT 0.0
.IP \(bu 2
@ -466671,7 +466722,7 @@ But that advice is backwards for the changelog. We follow the
our changelog, and use towncrier to generate it for each release. As a
contributor, all that means is that you need to add a file to the
\fBsalt/changelog\fP directory, using the \fB<issue #>.<type>\fP format. For
instanch, if you fixed issue 123, you would do:
instance, if you fixed issue 123, you would do:
.INDENT 0.0
.INDENT 3.5
.sp
@ -476582,6 +476633,66 @@ Update to \fBmarkdown\-it\-py==2.2.0\fP due to:
.UNINDENT
.UNINDENT
.UNINDENT
(release\-3006.1)=
.SS Salt 3006.1 release notes
.SS Changelog
.SS Fixed
.INDENT 0.0
.IP \(bu 2
Check that the return data from the cloud create function is a dictionary before attempting to pull values out. \fI\%#61236\fP
.IP \(bu 2
Ensure NamedLoaderContext\(aqs have their value() used if passing to other modules \fI\%#62477\fP
.IP \(bu 2
add documentation note about reactor state ids. \fI\%#63589\fP
.IP \(bu 2
Added support for \fBtest=True\fP to the \fBfile.cached\fP state module \fI\%#63785\fP
.IP \(bu 2
Updated \fBsource_hash\fP documentation and added a log warning when \fBsource_hash\fP is used with a source other than \fBhttp\fP, \fBhttps\fP and \fBftp\fP\&. \fI\%#63810\fP
.IP \(bu 2
Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. \fI\%#64081\fP
.IP \(bu 2
Fix dmsetup device names with hyphen being picked up. \fI\%#64082\fP
.IP \(bu 2
Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
This event is only used when these functions are called via the schedule execution modules.
Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. \fI\%#64102\fP, \fI\%#64103\fP
.IP \(bu 2
Default to a 0 timeout if none is given for the terraform roster to avoid \fB\-o ConnectTimeout=None\fP when using \fBsalt\-ssh\fP \fI\%#64109\fP
.IP \(bu 2
Disable class level caching of the file client on \fBSaltCacheLoader\fP and properly use context managers to take care of initialization and termination of the file client. \fI\%#64111\fP
.IP \(bu 2
Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
whenever possible or making sure \fB\&.destroy()\fP was called when using a context manager was not possible. \fI\%#64113\fP
.IP \(bu 2
Fix running \fI\%setup.py\fP when passing in \-\-salt\-config\-dir and \-\-salt\-cache\-dir arguments. \fI\%#64114\fP
.IP \(bu 2
Moved /etc/salt/proxy and /lib/systemd/system/salt\-proxy@.service to the salt\-minion DEB package \fI\%#64117\fP
.IP \(bu 2
Stop passing \fB**kwargs\fP and be explicit about the keyword arguments to pass, namely, to \fBcp.cache_file\fP call in \fBsalt.states.pkg\fP \fI\%#64118\fP
.IP \(bu 2
lgpo_reg.set_value now returns \fBTrue\fP on success instead of \fBNone\fP \fI\%#64126\fP
.IP \(bu 2
Make salt user\(aqs home /opt/saltstack/salt \fI\%#64141\fP
.IP \(bu 2
Fix cmd.run doesn\(aqt output changes in test mode \fI\%#64150\fP
.IP \(bu 2
Move salt user and group creation to common package \fI\%#64158\fP
.IP \(bu 2
Fixed issue in salt\-cloud so that multiple masters specified in the cloud
are written to the minion config properly \fI\%#64170\fP
.IP \(bu 2
Make sure the \fBsalt\-ssh\fP CLI calls its \fBfsclient.destroy()\fP method when done. \fI\%#64184\fP
.IP \(bu 2
Stop using the deprecated \fBsalt.transport.client\fP imports. \fI\%#64186\fP
.IP \(bu 2
Add a \fB\&.pth\fP to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. \fI\%#64192\fP
.IP \(bu 2
Fix \fBlgpo_reg\fP state to work with User policy \fI\%#64200\fP
.IP \(bu 2
Cloud deployment directories are owned by salt user and group \fI\%#64204\fP
.IP \(bu 2
\fBlgpo_reg\fP state now enforces and reports changes to the registry \fI\%#64222\fP
.UNINDENT
.sp
See \fI\%Install a release candidate\fP
for more information about installing an RC when one is available.

View file

@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SPM" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt"
.TH "SPM" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt"
.SH NAME
spm \- Salt Package Manager Command
.sp

View file

@ -5013,6 +5013,7 @@ Default: ``3600``
If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount
of time, in seconds, before the cache is considered invalid by a master and a fresh
pillar is recompiled and stored.
The cache TTL does not prevent pillar cache from being refreshed before its TTL expires.
.. conf_master:: pillar_cache_backend

View file

@ -0,0 +1,52 @@
(release-3006.1)=
# Salt 3006.1 release notes
<!---
Do not edit this file. This is auto generated.
Edit the templates in doc/topics/releases/templates/
for a given release.
-->
<!--
Add release specific details below
-->
<!--
Do not edit the changelog below.
This is auto generated.
-->
## Changelog
### Fixed
- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
This event is only used when these functions are called via the schedule execution modules.
Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113)
- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
- Fixed issue in salt-cloud so that multiple masters specified in the cloud
are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)

View file

@ -0,0 +1,15 @@
(release-3006.1)=
# Salt 3006.1 release notes{{ unreleased }}
{{ warning }}
<!--
Add release specific details below
-->
<!--
Do not edit the changelog below.
This is auto generated.
-->
## Changelog
{{ changelog }}

View file

@ -1025,6 +1025,7 @@ user: salt
# If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount
# of time, in seconds, before the cache is considered invalid by a master and a fresh
# pillar is recompiled and stored.
# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires.
#pillar_cache_ttl: 3600
# If and only if a master has set `pillar_cache: True`, one of several storage providers

View file

@ -148,7 +148,6 @@ ci:
- "**/site-packages/salt/modules/runit.py*"
- "**/site-packages/salt/modules/s6.py*"
- "**/site-packages/salt/modules/scsi.py*"
- "**/site-packages/salt/modules/seed.py*"
- "**/site-packages/salt/modules/sensors.py*"
- "**/site-packages/salt/modules/service.py*"
- "**/site-packages/salt/modules/shadow.py*"

View file

@ -0,0 +1 @@
import _salt_onedir_extras; _salt_onedir_extras.setup(__file__)

View file

@ -0,0 +1,18 @@
import pathlib
import sys
def setup(pth_file_path):
# Discover the extras-<py-major>.<py-minor> directory
extras_parent_path = pathlib.Path(pth_file_path).resolve().parent.parent
if not sys.platform.startswith("win"):
extras_parent_path = extras_parent_path.parent
extras_path = str(extras_parent_path / "extras-{}.{}".format(*sys.version_info))
if extras_path in sys.path and sys.path[0] != extras_path:
# The extras directory must come first
sys.path.remove(extras_path)
if extras_path not in sys.path:
sys.path.insert(0, extras_path)

View file

@ -1,3 +1,41 @@
salt (3006.1) stable; urgency=medium
# Fixed
* Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
* Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
* add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
* Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
* Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
* Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
* Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
* Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
This event is only used when these functions are called via the schedule execution modules.
Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
* Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
* Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
* Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113)
* Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
* Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
* Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
* lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
* Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
* Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
* Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
* Fixed issue in salt-cloud so that multiple masters specified in the cloud
are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
* Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
* Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
* Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
* Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
* Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
* ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
-- Salt Project Packaging <saltproject-packaging@vmware.com> Fri, 05 May 2023 17:44:35 +0000
salt (3006.0) stable; urgency=medium

View file

@ -29,6 +29,7 @@ override_dh_auto_build:
build/onedir/venv/bin/tools pkg build salt-onedir . --package-name build/onedir/salt --platform linux
build/onedir/venv/bin/tools pkg pre-archive-cleanup --pkg build/onedir/salt
else
override_dh_auto_build:
# The relenv onedir is being provided, all setup up until Salt is installed
@ -38,6 +39,7 @@ override_dh_auto_build:
# Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the <onedir>/bin directory
find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \;
endif
# dh_auto_install tries to invoke distutils causing failures.
@ -47,4 +49,9 @@ override_dh_auto_install:
override_dh_install:
mkdir -p debian/salt-common/opt/saltstack
cp -R build/onedir/salt debian/salt-common/opt/saltstack/
# Generate master config
mkdir -p debian/salt-master/etc/salt
sed 's/#user: root/user: salt/g' conf/master > debian/salt-master/etc/salt/master
dh_install

View file

@ -0,0 +1,5 @@
# Debian maintainer script (postinst) for the salt-cloud package.
# "$1" is the dpkg action; "configure" runs on (re)installation.
case "$1" in
    configure)
        # The master runs as salt:salt, so the cloud deploy directories must
        # be owned by that user for deploy script management to work.
        # NOTE(review): the python3.10 path is hard-coded to the bundled
        # onedir layout -- verify it tracks the shipped Python version.
        chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy
        ;;
esac

View file

@ -0,0 +1,39 @@
# Debian maintainer script (preinst) for the salt-common package.
# Creates the salt system user/group and home directory before files are
# unpacked.  The SALT_* variables may be pre-set in the environment; the
# defaults below are used otherwise.
case "$1" in
    install|upgrade)
        [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
        [ -z "$SALT_USER" ] && SALT_USER=salt
        [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
        [ -z "$SALT_GROUP" ] && SALT_GROUP=salt

        # create user to avoid running server as root
        # 1. create group if not existing
        if ! getent group | grep -q "^$SALT_GROUP:" ; then
            echo -n "Adding group $SALT_GROUP.."
            addgroup --quiet --system $SALT_GROUP 2>/dev/null ||true
            echo "..done"
        fi
        # 2. create homedir if not existing
        test -d $SALT_HOME || mkdir -p $SALT_HOME
        # 3. create user if not existing
        if ! getent passwd | grep -q "^$SALT_USER:"; then
            echo -n "Adding system user $SALT_USER.."
            useradd --system \
                --no-create-home \
                -s /sbin/nologin \
                -g $SALT_GROUP \
                $SALT_USER 2>/dev/null || true
            echo "..done"
        fi
        # 4. adjust passwd entry (comment, home and primary group), in case
        # the user pre-existed with different settings
        usermod -c "$SALT_NAME" \
            -d $SALT_HOME \
            -g $SALT_GROUP \
            $SALT_USER
        # 5. adjust file and directory permissions, unless the admin has
        # registered an override via dpkg-statoverride
        if ! dpkg-statoverride --list $SALT_HOME >/dev/null
        then
            chown -R $SALT_USER:$SALT_GROUP $SALT_HOME
            chmod u=rwx,g=rwx,o=rx $SALT_HOME
        fi
        ;;
esac

View file

@ -1,2 +1 @@
pkg/common/conf/master /etc/salt
pkg/common/salt-master.service /lib/systemd/system

View file

@ -1,3 +1,6 @@
adduser --system salt --group
chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt
if command -v systemctl; then systemctl enable salt-master; fi
case "$1" in
configure)
chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt
if command -v systemctl; then systemctl enable salt-master; fi
;;
esac

View file

@ -250,6 +250,7 @@ else
fi
PKG_FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH.pkg"
if [ "${SIGN}" -eq 1 ]; then
_msg "Building the product package (signed)"
# This is not a nightly build, so we want to sign it
@ -260,7 +261,7 @@ if [ "${SIGN}" -eq 1 ]; then
--version="$VERSION" \
--sign "$DEV_INSTALL_CERT" \
--timestamp \
"$FILE" > "$CMD_OUTPUT" 2>&1; then
"$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then
_success
else
_failure
@ -268,12 +269,11 @@ if [ "${SIGN}" -eq 1 ]; then
else
_msg "Building the product package (unsigned)"
# This is a nightly build, so we don't sign it
FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-unsigned.pkg"
if productbuild --resources="$SCRIPT_DIR/pkg-resources" \
--distribution="$DIST_XML" \
--package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \
--version="$VERSION" \
"$FILE" > "$CMD_OUTPUT" 2>&1; then
"$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then
_success
else
_failure

View file

@ -14,6 +14,10 @@
%global __requires_exclude_from ^.*\\.so.*$
%define _source_payload w2.gzdio
%define _binary_payload w2.gzdio
%define _SALT_GROUP salt
%define _SALT_USER salt
%define _SALT_NAME Salt
%define _SALT_HOME /opt/saltstack/salt
# Disable python bytecompile for MANY reasons
%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g')
@ -21,7 +25,7 @@
%define fish_dir %{_datadir}/fish/vendor_functions.d
Name: salt
Version: 3006.0
Version: 3006.1
Release: 0
Summary: A parallel remote execution system
Group: System Environment/Daemons
@ -43,9 +47,13 @@ BuildRequires: python3
BuildRequires: python3-pip
BuildRequires: openssl
BuildRequires: git
# rhel is not defined on all rpm based distros.
%if %{?rhel:1}%{!?rhel:0}
%if %{rhel} >= 9
BuildRequires: libxcrypt-compat
%endif
%endif
%description
Salt is a distributed remote execution system used to execute commands and
@ -140,8 +148,12 @@ cd $RPM_BUILD_DIR
# the <onedir>/bin directory
find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \;
$RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux
$RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux
$RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt
# Generate master config
sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master
%else
# The relenv onedir is being provided, all setup up until Salt is installed
# is expected to be done
@ -151,6 +163,9 @@ cd $RPM_BUILD_DIR
# Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the <onedir>/bin directory
find salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \;
# Generate master config
sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master
cd $RPM_BUILD_DIR
%endif
@ -208,7 +223,7 @@ install -m 0755 %{buildroot}/opt/saltstack/salt/salt-pip %{buildroot}%{_bindir}/
# Add the config files
install -p -m 0640 %{_salt_src}/conf/minion %{buildroot}%{_sysconfdir}/salt/minion
install -p -m 0640 %{_salt_src}/pkg/common/conf/master %{buildroot}%{_sysconfdir}/salt/master
install -p -m 0640 $RPM_BUILD_DIR/build/master %{buildroot}%{_sysconfdir}/salt/master
install -p -m 0640 %{_salt_src}/conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud
install -p -m 0640 %{_salt_src}/conf/roster %{buildroot}%{_sysconfdir}/salt/roster
install -p -m 0640 %{_salt_src}/conf/proxy %{buildroot}%{_sysconfdir}/salt/proxy
@ -274,8 +289,6 @@ rm -rf %{buildroot}
%dir %{_sysconfdir}/salt/pki
%files master
%defattr(-,root,root)
%doc %{_mandir}/man7/salt.7*
@ -307,6 +320,7 @@ rm -rf %{buildroot}
%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/syndics/
%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/tokens/
%files minion
%defattr(-,root,root)
%doc %{_mandir}/man1/salt-call.1*
@ -323,17 +337,20 @@ rm -rf %{buildroot}
%dir %{_sysconfdir}/salt/minion.d
%dir %attr(0750, root, root) %{_var}/cache/salt/minion/
%files syndic
%doc %{_mandir}/man1/salt-syndic.1*
%{_bindir}/salt-syndic
%{_unitdir}/salt-syndic.service
%files api
%defattr(-,root,root)
%doc %{_mandir}/man1/salt-api.1*
%{_bindir}/salt-api
%{_unitdir}/salt-api.service
%files cloud
%doc %{_mandir}/man1/salt-cloud.1*
%{_bindir}/salt-cloud
@ -344,36 +361,65 @@ rm -rf %{buildroot}
%{_sysconfdir}/salt/cloud.providers.d
%config(noreplace) %{_sysconfdir}/salt/cloud
%files ssh
%doc %{_mandir}/man1/salt-ssh.1*
%{_bindir}/salt-ssh
%config(noreplace) %{_sysconfdir}/salt/roster
# Add salt user/group for Salt Master
%pre master
getent group salt >/dev/null || groupadd -r salt
getent passwd salt >/dev/null || \
useradd -r -g salt -s /sbin/nologin \
-c "Salt user for Salt Master" salt
%pre
# create user to avoid running server as root
# 1. create group if not existing
if ! getent group %{_SALT_GROUP}; then
groupadd --system %{_SALT_GROUP} 2>/dev/null ||true
fi
# 2. create homedir if not existing
test -d %{_SALT_HOME} || mkdir -p %{_SALT_HOME}
# 3. create user if not existing
# -g %{_SALT_GROUP} \
if ! getent passwd | grep -q "^%{_SALT_USER}:"; then
useradd --system \
--no-create-home \
-s /sbin/nologin \
-g %{_SALT_GROUP} \
%{_SALT_USER} 2>/dev/null || true
fi
# 4. adjust passwd entry
usermod -c "%{_SALT_NAME}" \
-d %{_SALT_HOME} \
-g %{_SALT_GROUP} \
%{_SALT_USER}
# 5. adjust file and directory permissions
chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME}
# assumes systemd for RHEL 7 & 8 & 9
%preun master
# RHEL 9 is giving warning msg if syndic is not installed, suppress it
%systemd_preun salt-syndic.service > /dev/null 2>&1
%preun minion
%systemd_preun salt-minion.service
%preun api
%systemd_preun salt-api.service
%post
chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME}
chmod u=rwx,g=rwx,o=rx %{_SALT_HOME}
ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm
ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip
%post cloud
chown -R salt:salt /etc/salt/cloud.deploy.d
chown -R salt:salt /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy
ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud
%post master
%systemd_post salt-master.service
ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt
@ -450,6 +496,41 @@ fi
%changelog
* Fri May 05 2023 Salt Project Packaging <saltproject-packaging@vmware.com> - 3006.1
# Fixed
- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
This event is only used when these functions are called via the schedule execution modules.
Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager
whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113)
- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
- Fixed issue in salt-cloud so that multiple masters specified in the cloud
are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
* Tue Apr 18 2023 Salt Project Packaging <saltproject-packaging@vmware.com> - 3006.0
# Removed

View file

@ -21,6 +21,7 @@ from tests.support.helpers import (
SaltPkgInstall,
TestUser,
)
from tests.support.sminion import create_sminion
log = logging.getLogger(__name__)
@ -33,6 +34,16 @@ def version(install_salt):
return install_salt.get_version(version_only=True)
@pytest.fixture(scope="session")
def sminion():
return create_sminion()
@pytest.fixture(scope="session")
def grains(sminion):
return sminion.opts["grains"].copy()
def pytest_addoption(parser):
"""
register argparse-style options and ini-style config values.
@ -463,12 +474,17 @@ def extras_pypath():
extras_dir = "extras-{}.{}".format(*sys.version_info)
if platform.is_windows():
return pathlib.Path(
os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir, "bin"
os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir
)
elif platform.is_darwin():
return pathlib.Path(f"/opt", "salt", extras_dir, "bin")
return pathlib.Path("/opt", "salt", extras_dir)
else:
return pathlib.Path(f"/opt", "saltstack", "salt", extras_dir, "bin")
return pathlib.Path("/opt", "saltstack", "salt", extras_dir)
@pytest.fixture(scope="module")
def extras_pypath_bin(extras_pypath):
return extras_pypath / "bin"
@pytest.fixture(scope="module")
@ -476,7 +492,7 @@ def salt_api(salt_master, install_salt, extras_pypath):
"""
start up and configure salt_api
"""
shutil.rmtree(str(extras_pypath.parent), ignore_errors=True)
shutil.rmtree(str(extras_pypath), ignore_errors=True)
start_timeout = None
if platform.is_windows() and install_salt.singlebin:
start_timeout = 240

View file

@ -1,229 +1,55 @@
"""
Test Salt Pkg Downloads
"""
import contextlib
import logging
import os
import pathlib
import re
import shutil
import attr
import packaging
import pytest
from pytestskipmarkers.utils import platform
from saltfactories.utils import random_string
log = logging.getLogger(__name__)
@attr.s(kw_only=True, slots=True)
class PkgImage:
    # Metadata for one container image used by the package download tests.
    name = attr.ib()  # full container image reference (registry/repo:tag)
    os_type = attr.ib()  # repo family key; selects the setup_<os_type> helper
    os_version = attr.ib()  # OS release number (int or float)
    os_codename = attr.ib(default=None)  # Debian/Ubuntu codename, when applicable
    container_id = attr.ib()  # short id used in test parametrization names
    container = attr.ib(default=None)  # running container handle, set by the fixture

    def __str__(self):
        # Parametrized test ids display the short container id.
        return f"{self.container_id}"
def get_test_versions():
test_versions = []
containers = [
{
"image": "ghcr.io/saltstack/salt-ci-containers/amazon-linux:2",
"os_type": "amazon",
"os_version": 2,
"container_id": "amazon_2",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/centos:7",
"os_type": "redhat",
"os_version": 7,
"container_id": "centos_7",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/centos-stream:8",
"os_type": "redhat",
"os_version": 8,
"container_id": "centosstream_8",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/centos-stream:9",
"os_type": "redhat",
"os_version": 9,
"container_id": "centosstream_9",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/fedora:36",
"os_type": "fedora",
"os_version": 36,
"container_id": "fedora_36",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/fedora:37",
"os_type": "fedora",
"os_version": 37,
"container_id": "fedora_37",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/fedora:38",
"os_type": "fedora",
"os_version": 38,
"container_id": "fedora_38",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/debian:10",
"os_type": "debian",
"os_version": 10,
"os_codename": "buster",
"container_id": "debian_10",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/debian:11",
"os_type": "debian",
"os_version": 11,
"os_codename": "bullseye",
"container_id": "debian_11",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/ubuntu:20.04",
"os_type": "ubuntu",
"os_version": 20.04,
"os_codename": "focal",
"container_id": "ubuntu_20_04",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/ubuntu:22.04",
"os_type": "ubuntu",
"os_version": 22.04,
"os_codename": "jammy",
"container_id": "ubuntu_22_04",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/photon:3",
"os_type": "photon",
"os_version": 3,
"container_id": "photon_3",
},
{
"image": "ghcr.io/saltstack/salt-ci-containers/photon:4",
"os_type": "photon",
"os_version": 4,
"container_id": "photon_4",
},
]
for container in containers:
test_versions.append(
PkgImage(
name=container["image"],
os_type=container["os_type"],
os_version=container["os_version"],
os_codename=container.get("os_codename", ""),
container_id=container["container_id"],
)
)
return test_versions
def get_container_type_id(value):
    """Return the pytest parametrize id for ``value`` (its string form)."""
    return str(value)
@pytest.fixture(scope="module", params=get_test_versions(), ids=get_container_type_id)
def download_test_image(request):
    # Parametrized over every supported container image; yields one PkgImage
    # per test module run.
    return request.param
def get_salt_test_commands():
salt_release = get_salt_release()
if platform.is_windows():
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
salt_test_commands = [
"salt-call.exe --local test.versions",
"salt-call.exe --local grains.items",
"salt-minion.exe --version",
["salt-call.exe", "--local", "test.versions"],
["salt-call.exe", "--local", "grains.items"],
["salt-minion.exe", "--version"],
]
else:
salt_test_commands = [
"salt-call.bat --local test.versions",
"salt-call.bat --local grains.items",
"salt.bat --version",
"salt-master.bat --version",
"salt-minion.bat --version",
"salt-ssh.bat --version",
"salt-syndic.bat --version",
"salt-api.bat --version",
"salt-cloud.bat --version",
["salt-call.bat", "--local", "test.versions"],
["salt-call.bat", "--local", "grains.items"],
["salt.bat", "--version"],
["salt-master.bat", "--version"],
["salt-minion.bat", "--version"],
["salt-ssh.bat", "--version"],
["salt-syndic.bat", "--version"],
["salt-api.bat", "--version"],
["salt-cloud.bat", "--version"],
]
else:
salt_test_commands = [
"salt-call --local test.versions",
"salt-call --local grains.items",
"salt --version",
"salt-master --version",
"salt-minion --version",
"salt-ssh --version",
"salt-syndic --version",
"salt-api --version",
"salt-cloud --version",
["salt-call", "--local", "test.versions"],
["salt-call", "--local", "grains.items"],
["salt", "--version"],
["salt-master", "--version"],
["salt-minion", "--version"],
["salt-ssh", "--version"],
["salt-syndic", "--version"],
["salt-api", "--version"],
["salt-cloud", "--version"],
]
return salt_test_commands
@pytest.fixture(scope="module")
def pkg_container(
    salt_factories,
    download_test_image,
    root_url,
    salt_release,
    tmp_path_factory,
    gpg_key_name,
):
    """Start the parametrized container and configure Salt repos inside it.

    Looks up the matching ``setup_<os_type>`` helper, runs it inside the
    started container, and yields the ``PkgImage`` with its ``container``
    attribute populated.  Skips when the OS type has no setup helper.
    """
    downloads_path = tmp_path_factory.mktemp("downloads")
    container = salt_factories.get_container(
        random_string(f"{download_test_image.container_id}_"),
        download_test_image.name,
        pull_before_start=True,
        skip_on_pull_failure=True,
        skip_if_docker_client_not_connectable=True,
        container_run_kwargs=dict(
            volumes={
                # Expose the host downloads dir inside the container.
                str(downloads_path): {"bind": "/downloads", "mode": "z"},
            }
        ),
    )
    try:
        container_setup_func = globals()[f"setup_{download_test_image.os_type}"]
    except KeyError:
        raise pytest.skip.Exception(
            f"Unable to handle {download_test_image.os_type}. Skipping.",
            _use_item_location=True,
        )
    # Clean the downloads dir when the container is torn down.
    container.before_terminate(shutil.rmtree, str(downloads_path), ignore_errors=True)
    with container.started():
        download_test_image.container = container
        try:
            container_setup_func(
                container,
                download_test_image.os_version,
                download_test_image.os_codename,
                root_url,
                salt_release,
                downloads_path,
                gpg_key_name,
            )
            yield download_test_image
        except Exception as exc:
            # BUG FIX: the original read ``pkg_container.os_type`` -- that is
            # this fixture *function* object, which has no ``os_type`` and
            # would raise AttributeError, hiding the real setup failure.
            pytest.fail(f"Failed to setup {download_test_image.os_type}: {exc}")
@pytest.fixture(scope="module")
def root_url(salt_release):
if os.environ.get("SALT_REPO_TYPE", "release") == "staging":
@ -270,6 +96,28 @@ def get_salt_release():
return salt_release
def get_repo_subpath_params():
    """Return the repo sub-path variants to exercise.

    Always contains ``"minor"`` and the current major version; additionally
    contains ``"latest"`` when the release under test is at least the release
    named by the ``LATEST_SALT_RELEASE`` environment variable.
    """
    current = packaging.version.parse(get_salt_release())
    subpaths = ["minor", current.major]
    latest_name = os.environ.get("LATEST_SALT_RELEASE")
    if latest_name is not None:
        latest = packaging.version.parse(latest_name)
        if current >= latest:
            log.debug(
                f"Running the tests for the latest release since {str(current)} >= {str(latest)}"
            )
            subpaths.append("latest")
    return subpaths
@pytest.fixture(
    scope="module",
    params=get_repo_subpath_params(),
)
def repo_subpath(request):
    # One of "minor", the release major version, or "latest"
    # (see get_repo_subpath_params).
    return request.param
@pytest.fixture(scope="module")
def gpg_key_name(salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
@ -282,28 +130,123 @@ def salt_release():
yield get_salt_release()
@pytest.fixture(scope="module")
def _setup_system(
    tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name, repo_subpath
):
    """Configure the Salt package repositories on the system under test.

    Dispatches on the minion grains to the matching ``setup_*`` helper and
    yields once the repository is in place.  The temporary downloads
    directory is removed on teardown even when setup fails.
    """
    downloads_path = tmp_path_factory.mktemp("downloads")
    try:
        # Windows is a special case, because sometimes we need to uninstall
        # the packages, so setup_windows is a context manager.
        if grains["os_family"] == "Windows":
            with setup_windows(
                shell,
                root_url=root_url,
                salt_release=salt_release,
                downloads_path=downloads_path,
                repo_subpath=repo_subpath,
            ):
                yield
        else:
            if grains["os_family"] == "MacOS":
                setup_macos(
                    shell,
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    repo_subpath=repo_subpath,
                )
            elif grains["os"] in ("Amazon", "Fedora"):
                # Both use RedHat-style repos named after the distro
                # (the two original branches were byte-identical).
                setup_redhat_family(
                    shell,
                    os_name=grains["os"].lower(),
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os"] == "VMware Photon OS":
                setup_redhat_family(
                    shell,
                    os_name="photon",
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os_family"] == "RedHat":
                setup_redhat_family(
                    shell,
                    os_name="redhat",
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os_family"] == "Debian":
                setup_debian_family(
                    shell,
                    os_name=grains["os"].lower(),
                    os_version=grains["osrelease"],
                    os_codename=grains["oscodename"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            else:
                # BUG FIX: pytest.fail does not %-format; the original passed
                # grains["osfinger"] as the second positional argument, which
                # is pytest.fail's boolean ``pytest`` flag, so the message was
                # never interpolated.
                pytest.fail(f"Don't know how to handle {grains['osfinger']}")
            yield
    finally:
        shutil.rmtree(downloads_path, ignore_errors=True)
def setup_redhat_family(
container,
shell,
os_name,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
os_name,
gpg_key_name,
repo_subpath,
):
arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
if arch == "aarch64":
arch = "arm64"
repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}"
gpg_file_url = f"{repo_url_base}/{gpg_key_name}"
if repo_subpath == "minor":
repo_url_base = (
f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
)
else:
repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"
try:
pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
except Exception as exc:
pytest.fail(f"Failed to download {gpg_file_url}: {exc}")
ret = container.run("rpm", "--import", f"/downloads/{gpg_key_name}")
ret = shell.run("rpm", "--import", str(downloads_path / gpg_key_name), check=False)
if ret.returncode != 0:
pytest.fail("Failed to import gpg key")
@ -311,11 +254,9 @@ def setup_redhat_family(
f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo"
)
clean_command = "all" if os_name == "photon" else "expire-cache"
install_dmesg = ("yum", "install", "-y", "util-linux")
commands = [
("mv", f"/downloads/{repo_file.name}", f"/etc/yum.repos.d/salt-{os_name}.repo"),
("yum", "clean", clean_command),
("mv", str(repo_file), "/etc/yum.repos.d/salt.repo"),
("yum", "clean", "all" if os_name == "photon" else "expire-cache"),
(
"yum",
"install",
@ -329,109 +270,22 @@ def setup_redhat_family(
),
]
# For some reason, the centosstream9 container doesn't have dmesg installed
if os_version == 9 and os_name == "redhat":
commands.insert(2, install_dmesg)
for cmd in commands:
ret = container.run(*cmd)
ret = shell.run(*cmd, check=False)
if ret.returncode != 0:
pytest.fail(f"Failed to run: {' '.join(cmd)!r}")
def setup_amazon(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"amazon",
gpg_key_name,
)
def setup_redhat(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"redhat",
gpg_key_name,
)
def setup_fedora(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"fedora",
gpg_key_name,
)
def setup_photon(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"photon",
gpg_key_name,
)
pytest.fail(f"Failed to run '{' '.join(cmd)!r}':\n{ret}")
def setup_debian_family(
container,
shell,
os_name,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
os_name,
gpg_key_name,
repo_subpath,
):
arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
if arch == "aarch64":
@ -439,12 +293,18 @@ def setup_debian_family(
elif arch == "x86_64":
arch = "amd64"
ret = container.run("apt-get", "update", "-y")
ret = shell.run("apt-get", "update", "-y", check=False)
if ret.returncode != 0:
pytest.fail("Failed to run: 'apt-get update -y'")
pytest.fail(str(ret))
if repo_subpath == "minor":
repo_url_base = (
f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
)
else:
repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"
repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}"
gpg_file_url = f"{repo_url_base}/{gpg_key_name}"
try:
pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
except Exception as exc:
@ -455,10 +315,14 @@ def setup_debian_family(
f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n"
)
commands = [
("mv", f"/downloads/{gpg_key_name}", f"/usr/share/keyrings/{gpg_key_name}"),
(
"mv",
f"/downloads/{salt_sources_path.name}",
str(downloads_path / gpg_key_name),
f"/usr/share/keyrings/{gpg_key_name}",
),
(
"mv",
str(salt_sources_path),
"/etc/apt/sources.list.d/salt.list",
),
("apt-get", "install", "-y", "ca-certificates"),
@ -477,156 +341,110 @@ def setup_debian_family(
),
]
for cmd in commands:
ret = container.run(*cmd)
ret = shell.run(*cmd)
if ret.returncode != 0:
pytest.fail(f"Failed to run: {' '.join(cmd)!r}\n{ret}")
pytest.fail(str(ret))
def setup_debian(
    container,
    os_version,
    os_codename,
    root_url,
    salt_release,
    downloads_path,
    gpg_key_name,
):
    """
    Configure the Salt package repository inside a Debian container.

    Delegates to ``setup_debian_family`` with ``os_name`` pinned to
    ``"debian"``; all remaining arguments are forwarded as-is.
    """
    setup_debian_family(
        container, os_version, os_codename, root_url,
        salt_release, downloads_path, "debian", gpg_key_name,
    )
def setup_ubuntu(
    container,
    os_version,
    os_codename,
    root_url,
    salt_release,
    downloads_path,
    gpg_key_name,
):
    """
    Configure the Salt package repository inside an Ubuntu container.

    Delegates to ``setup_debian_family`` with ``os_name`` pinned to
    ``"ubuntu"``; all remaining arguments are forwarded as-is.
    """
    setup_debian_family(
        container, os_version, os_codename, root_url,
        salt_release, downloads_path, "ubuntu", gpg_key_name,
    )
@pytest.fixture(scope="module")
def setup_macos(root_url, salt_release, shell):
def setup_macos(shell, root_url, salt_release, downloads_path, repo_subpath):
arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
if arch == "aarch64":
arch = "arm64"
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg"
mac_pkg_url = f"{root_url}/macos/minor/{salt_release}/{mac_pkg}"
if repo_subpath == "minor":
mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}"
else:
mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}"
else:
mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}"
mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg"
mac_pkg_path = f"/tmp/{mac_pkg}"
pytest.helpers.download_file(mac_pkg_url, f"/tmp/{mac_pkg}")
mac_pkg_path = downloads_path / mac_pkg
pytest.helpers.download_file(mac_pkg_url, mac_pkg_path)
ret = shell.run(
"installer",
"-pkg",
mac_pkg_path,
str(mac_pkg_path),
"-target",
"/",
check=False,
)
assert ret.returncode == 0, ret
yield
@contextlib.contextmanager
def setup_windows(shell, root_url, salt_release, downloads_path, repo_subpath):
    """
    Download and install the Salt Windows package, yield to the caller, then
    uninstall it again on the way out.

    :param shell: subprocess helper used to run the installer commands.
    :param root_url: base URL of the package repository.
    :param salt_release: the Salt version string under test.
    :param downloads_path: local directory the package is downloaded into.
    :param repo_subpath: repository sub-path (``"minor"`` selects the
        per-release tree, anything else the flat tree).
    """
    # Hoisted out of the ``try`` block so the ``finally`` clause can always
    # reference them, even when setup fails before they would be assigned.
    install_type = os.environ.get("INSTALL_TYPE") or "msi"
    pkg_path = None
    installed = False
    try:
        root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt")
        arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
        if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
            if install_type.lower() == "nsis":
                if arch.lower() != "x86":
                    arch = arch.upper()
                win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe"
            else:
                if arch.lower() != "x86":
                    arch = arch.upper()
                win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi"
            if repo_subpath == "minor":
                win_pkg_url = (
                    f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}"
                )
            else:
                win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}"
            ssm_bin = root_dir / "ssm.exe"
        else:
            win_pkg = f"salt-{salt_release}-windows-{arch}.exe"
            win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}"
            ssm_bin = root_dir / "bin" / "ssm_bin"
        pkg_path = downloads_path / win_pkg
        pytest.helpers.download_file(win_pkg_url, pkg_path)
        if install_type.lower() == "nsis":
            ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False)
        else:
            ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""')
        assert ret.returncode == 0, ret
        installed = True
        log.debug("Removing installed salt-minion service")
        ret = shell.run(
            "cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False
        )
        assert ret.returncode == 0, ret
        yield
    finally:
        # We need to uninstall the MSI packages, otherwise they will not install correctly
        # Only attempt the uninstall when the install actually succeeded;
        # otherwise the failing uninstall would mask the original setup error.
        if installed and install_type.lower() == "msi":
            ret = shell.run("msiexec", "/qn", "/x", str(pkg_path))
            assert ret.returncode == 0, ret
@pytest.fixture(scope="module")
def setup_windows(root_url, salt_release, shell):
def install_dir(_setup_system):
    """
    Return the platform specific directory Salt is installed into.
    """
    if platform.is_windows():
        base = pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt")
        return base.resolve()
    if platform.is_darwin():
        return pathlib.Path("/opt", "salt")
    return pathlib.Path("/opt", "saltstack", "salt")
root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt")
arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
install_type = os.environ.get("INSTALL_TYPE") or "msi"
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
if install_type.lower() == "nsis":
if arch.lower() != "x86":
arch = arch.upper()
win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe"
else:
if arch.lower() != "x86":
arch = arch.upper()
win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi"
win_pkg_url = f"{root_url}/windows/minor/{salt_release}/{win_pkg}"
ssm_bin = root_dir / "ssm.exe"
else:
win_pkg = f"salt-{salt_release}-windows-{arch}.exe"
win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}"
ssm_bin = root_dir / "bin" / "ssm_bin"
@pytest.fixture(scope="module")
def salt_test_command(request, install_dir):
    """
    Return the parametrized salt CLI command with its executable expanded to
    the absolute path inside the installation directory.
    """
    # Copy the parametrized list instead of mutating ``request.param`` in
    # place; the same parameter object can be shared across test items.
    command = list(request.param)
    command[0] = str(install_dir / command[0])
    return command
pkg_path = pathlib.Path(r"C:\TEMP", win_pkg)
pkg_path.parent.mkdir(exist_ok=True)
pytest.helpers.download_file(win_pkg_url, pkg_path)
if install_type.lower() == "nsis":
ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False)
else:
ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""')
assert ret.returncode == 0, ret
log.debug("Removing installed salt-minion service")
ret = shell.run(
"cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False
)
assert ret.returncode == 0, ret
@pytest.mark.skip_unless_on_linux
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands())
@pytest.mark.skip_if_binaries_missing("dockerd")
def test_download_linux(salt_test_command, pkg_container, root_url, salt_release):
    """
    Test downloading of Salt packages and running various commands on Linux hosts
    """
    # The command is executed inside the docker container the package was
    # installed into, not on the host.
    res = pkg_container.container.run(salt_test_command)
    assert res.returncode == 0
@pytest.mark.skip_unless_on_darwin
@pytest.mark.usefixtures("setup_macos")
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands())
def test_download_macos(salt_test_command, shell):
    """
    Test downloading of Salt packages and running various commands on Mac OS hosts
    """
    # The parametrized command is a single space separated string; split it
    # into argv form and run it directly on the host.
    ret = shell.run(*salt_test_command.split(), check=False)
    assert ret.returncode == 0, ret
@pytest.mark.skip_unless_on_windows
@pytest.mark.usefixtures("setup_windows")
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands())
def test_download_windows(salt_test_command, shell):
"""
Test downloading of Salt packages and running various commands on Windows hosts
"""
_cmd = salt_test_command.split()
root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt")
_cmd[0] = str(root_dir / _cmd[0])
ret = shell.run(*_cmd, check=False)
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands(), indirect=True)
def test_download(shell, salt_test_command):
    """
    Test downloading of Salt packages and running various commands.
    """
    # ``salt_test_command`` is resolved through the indirect fixture, which
    # prefixes the executable with the platform specific install directory.
    ret = shell.run(*salt_test_command, check=False)
    assert ret.returncode == 0, ret

View file

@ -13,25 +13,21 @@ def pypath():
if platform.is_windows():
return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt")
elif platform.is_darwin():
return pathlib.Path(f"{os.sep}opt", "salt", "bin")
return pathlib.Path("/opt", "salt", "bin")
else:
return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin")
return pathlib.Path("/opt", "saltstack", "salt", "bin")
@pytest.fixture(autouse=True)
def wipe_pydeps(install_salt, extras_pypath):
def wipe_pydeps(shell, install_salt, extras_pypath):
try:
yield
finally:
# Note, uninstalling anything with an associated script will leave the script.
# This is due to a bug in pip.
for dep in ["pep8", "PyGithub"]:
subprocess.run(
install_salt.binary_paths["pip"] + ["uninstall", "-y", dep],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False,
universal_newlines=True,
shell.run(
*(install_salt.binary_paths["pip"] + ["uninstall", "-y", dep]),
)
shutil.rmtree(extras_pypath, ignore_errors=True)
@ -56,32 +52,24 @@ def test_pip_install(salt_call_cli):
assert "The github execution module cannot be loaded" in use_lib.stderr
def test_pip_install_extras(install_salt, extras_pypath):
def test_pip_install_extras(shell, install_salt, extras_pypath_bin):
"""
Test salt-pip installs into the correct directory
"""
dep = "pep8"
extras_keyword = "extras"
extras_keyword = "extras-3"
if platform.is_windows():
check_path = extras_pypath / f"{dep}.exe"
check_path = extras_pypath_bin / f"{dep}.exe"
else:
check_path = extras_pypath / dep
check_path = extras_pypath_bin / dep
install_ret = subprocess.run(
install_salt.binary_paths["pip"] + ["install", dep],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
install_ret = shell.run(*(install_salt.binary_paths["pip"] + ["install", dep]))
assert install_ret.returncode == 0
ret = subprocess.run(
install_salt.binary_paths["pip"] + ["list", "--format=json"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
ret = shell.run(*(install_salt.binary_paths["pip"] + ["list", "--format=json"]))
assert ret.returncode == 0
pkgs_installed = json.loads(ret.stdout.strip().decode())
for pkg in pkgs_installed:
assert ret.data # We can parse the JSON output
for pkg in ret.data:
if pkg["name"] == dep:
break
else:
@ -89,15 +77,14 @@ def test_pip_install_extras(install_salt, extras_pypath):
f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
)
show_ret = subprocess.run(
install_salt.binary_paths["pip"] + ["show", dep],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
show_ret = shell.run(*(install_salt.binary_paths["pip"] + ["show", dep]))
assert show_ret.returncode == 0
assert extras_keyword in show_ret.stdout.decode()
assert extras_keyword in show_ret.stdout
assert check_path.exists()
ret = shell.run(str(check_path), "--version")
assert ret.returncode == 0
def demote(user_uid, user_gid):
def result():
@ -108,8 +95,8 @@ def demote(user_uid, user_gid):
@pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows")
def test_pip_non_root(install_salt, test_account, extras_pypath):
check_path = extras_pypath / "pep8"
def test_pip_non_root(shell, install_salt, test_account, extras_pypath_bin):
check_path = extras_pypath_bin / "pep8"
# We should be able to issue a --help without being root
ret = subprocess.run(
install_salt.binary_paths["salt"] + ["--help"],
@ -156,3 +143,44 @@ def test_pip_non_root(install_salt, test_account, extras_pypath):
assert check_path.exists()
assert ret.returncode == 0, ret.stderr
def test_pip_install_salt_extension_in_extras(install_salt, extras_pypath, shell):
    """
    Test salt-pip installs into the correct directory and the salt extension
    is properly loaded.
    """
    dep = "salt-analytics-framework"
    dep_version = "0.1.0"

    install_ret = shell.run(
        *(install_salt.binary_paths["pip"] + ["install", f"{dep}=={dep_version}"]),
    )
    assert install_ret.returncode == 0

    ret = shell.run(
        *(install_salt.binary_paths["pip"] + ["list", "--format=json"]),
    )
    assert ret.returncode == 0
    pkgs_installed = json.loads(ret.stdout.strip())
    for pkg in pkgs_installed:
        if pkg["name"] == dep:
            break
    else:
        pytest.fail(
            f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
        )

    show_ret = shell.run(
        *(install_salt.binary_paths["pip"] + ["show", dep]),
    )
    assert show_ret.returncode == 0
    # The extension installs its code under an ``saf`` package in the extras dir.
    assert extras_pypath.joinpath("saf").is_dir()

    ret = shell.run(
        *(install_salt.binary_paths["minion"] + ["--versions-report"]),
    )
    # Fixed copy-paste bug: this must check the versions-report run (``ret``),
    # not the earlier ``pip show`` result (``show_ret``).
    assert ret.returncode == 0
    assert "Salt Extensions" in ret.stdout
    assert f"{dep}: {dep_version}" in ret.stdout

View file

@ -1,3 +1,6 @@
import pathlib
import subprocess
import psutil
import pytest
import yaml
@ -5,6 +8,7 @@ from pytestskipmarkers.utils import platform
pytestmark = [
pytest.mark.skip_on_windows,
pytest.mark.skip_on_darwin,
]
@ -12,11 +16,56 @@ def test_salt_user_master(salt_master, install_salt):
"""
Test the correct user is running the Salt Master
"""
if platform.is_windows() or platform.is_darwin():
pytest.skip("Package does not have user set. Not testing user")
match = False
for proc in psutil.Process(salt_master.pid).children():
assert proc.username() == "salt"
match = True
assert match
def test_salt_user_home(install_salt):
    """
    Test that the ``salt`` user's home directory is ``/opt/saltstack/salt``.
    """
    proc = subprocess.run(
        ["getent", "passwd", "salt"], check=False, capture_output=True
    )
    assert proc.returncode == 0

    # passwd entry layout: name:passwd:uid:gid:gecos:home:shell — the home
    # directory is field index 5. Guard the lookup instead of using a bare
    # ``except:`` which would also swallow SystemExit/KeyboardInterrupt.
    fields = proc.stdout.decode().split(":")
    home = fields[5] if len(fields) > 5 else ""
    assert home == "/opt/saltstack/salt"
def test_salt_user_group(install_salt):
    """
    Test that the ``salt`` user is a member of the ``salt`` group.
    """
    proc = subprocess.run(["id", "salt"], check=False, capture_output=True)
    assert proc.returncode == 0

    # ``id salt`` prints e.g. ``uid=999(salt) gid=999(salt) groups=999(salt)``;
    # same membership check as before, but without the bare ``except:`` that
    # silently swallowed all errors.
    in_group = any("salt" in group for group in proc.stdout.decode().split(" "))
    assert in_group is True
def test_salt_cloud_dirs(install_salt):
    """
    Test that the salt-cloud directories exist and are owned by the ``salt``
    user and group.
    """
    # NOTE(review): the site-packages path hard-codes ``python3.10`` — this
    # will break when the onedir Python version changes; confirm against the
    # packaged Python version.
    paths = [
        "/opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy",
        "/etc/salt/cloud.deploy.d",
    ]
    for name in paths:
        path = pathlib.Path(name)
        assert path.exists()
        assert path.owner() == "salt"
        assert path.group() == "salt"

View file

@ -0,0 +1,15 @@
import os
import pytest
pytestmark = [
pytest.mark.skip_unless_on_windows,
]
def test_ssm_present(install_salt):
    """
    The ssm.exe binary needs to be present in both the zip and the exe/msi
    builds
    """
    # The install fixture records where the package placed ssm.exe.
    ssm_bin = install_salt.ssm_bin
    assert os.path.exists(ssm_bin)

View file

@ -601,7 +601,7 @@ class SaltPkgInstall:
else:
log.info("Installing packages:\n%s", pprint.pformat(self.pkgs))
ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs)
if not (platform.is_darwin() or platform.is_windows()):
if not platform.is_darwin() and not platform.is_windows():
# Make sure we don't have any trailing references to old package file locations
assert "No such file or directory" not in ret.stdout
assert "/saltstack/salt/run" not in ret.stdout

102
pkg/tests/support/paths.py Normal file
View file

@ -0,0 +1,102 @@
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
:copyright: Copyright 2017 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
tests.support.paths
~~~~~~~~~~~~~~~~~~~
Tests related paths
"""
import logging
import os
import re
import sys
import tempfile
log = logging.getLogger(__name__)
SALT_CODE_DIR = os.path.join(
os.path.dirname(
os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
)
)
),
"salt",
)
TESTS_DIR = os.path.join(os.path.dirname(SALT_CODE_DIR), "tests")
if TESTS_DIR.startswith("//"):
# Have we been given an initial double forward slash? Ditch it!
TESTS_DIR = TESTS_DIR[1:]
if sys.platform.startswith("win"):
TESTS_DIR = os.path.normcase(TESTS_DIR)
CODE_DIR = os.path.dirname(TESTS_DIR)
if sys.platform.startswith("win"):
CODE_DIR = CODE_DIR.replace("\\", "\\\\")
UNIT_TEST_DIR = os.path.join(TESTS_DIR, "unit")
INTEGRATION_TEST_DIR = os.path.join(TESTS_DIR, "integration")
# Let's inject CODE_DIR so salt is importable if not there already
if TESTS_DIR in sys.path:
sys.path.remove(TESTS_DIR)
if CODE_DIR in sys.path and sys.path[0] != CODE_DIR:
sys.path.remove(CODE_DIR)
if CODE_DIR not in sys.path:
sys.path.insert(0, CODE_DIR)
if TESTS_DIR not in sys.path:
sys.path.insert(1, TESTS_DIR)
SYS_TMP_DIR = os.path.abspath(
os.path.realpath(
# Avoid ${TMPDIR} and gettempdir() on MacOS as they yield a base path too long
# for unix sockets: ``error: AF_UNIX path too long``
# Gentoo Portage prefers ebuild tests are rooted in ${TMPDIR}
os.environ.get("TMPDIR", tempfile.gettempdir())
if not sys.platform.startswith("darwin")
else "/tmp"
)
)
TMP = os.path.join(SYS_TMP_DIR, "salt-tests-tmpdir")
TMP_ROOT_DIR = os.path.join(TMP, "rootdir")
FILES = os.path.join(INTEGRATION_TEST_DIR, "files")
BASE_FILES = os.path.join(INTEGRATION_TEST_DIR, "files", "file", "base")
PROD_FILES = os.path.join(INTEGRATION_TEST_DIR, "files", "file", "prod")
PYEXEC = "python{}.{}".format(*sys.version_info)
MOCKBIN = os.path.join(INTEGRATION_TEST_DIR, "mockbin")
SCRIPT_DIR = os.path.join(CODE_DIR, "scripts")
TMP_STATE_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-state-tree")
TMP_PILLAR_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-pillar-tree")
TMP_PRODENV_STATE_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-prodenv-state-tree")
TMP_PRODENV_PILLAR_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-prodenv-pillar-tree")
TMP_CONF_DIR = TMP_MINION_CONF_DIR = os.path.join(TMP, "config")
TMP_SUB_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, "sub-minion")
TMP_SYNDIC_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, "syndic-minion")
TMP_SYNDIC_MASTER_CONF_DIR = os.path.join(TMP_CONF_DIR, "syndic-master")
TMP_SSH_CONF_DIR = TMP_MINION_CONF_DIR
CONF_DIR = os.path.join(INTEGRATION_TEST_DIR, "files", "conf")
PILLAR_DIR = os.path.join(FILES, "pillar")
TMP_SCRIPT_DIR = os.path.join(TMP, "scripts")
ENGINES_DIR = os.path.join(FILES, "engines")
LOG_HANDLERS_DIR = os.path.join(FILES, "log_handlers")
def list_test_mods():
    """
    Yield the dotted module name of every test file found under the unit and
    integration test directories.
    """
    pattern = re.compile(r"^test_.+\.py$")
    for base_dir in (UNIT_TEST_DIR, INTEGRATION_TEST_DIR):
        # The directory basename ("unit" / "integration") prefixes each name.
        prefix = os.path.basename(base_dir)
        for root, _, filenames in os.walk(base_dir):
            rel = root[len(base_dir) :].lstrip(os.sep).replace(os.sep, ".")
            for fname in filenames:
                if not pattern.match(fname):
                    continue
                parts = [prefix]
                if rel:
                    parts.append(rel)
                # Drop the trailing ".py" from the filename.
                parts.append(fname[:-3])
                yield ".".join(parts)

View file

@ -0,0 +1,209 @@
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
.. _runtime_vars:
Runtime Variables
-----------------
:command:`salt-runtests` provides a variable, :py:attr:`RUNTIME_VARS` which has some common paths defined at
startup:
.. autoattribute:: tests.support.runtests.RUNTIME_VARS
:annotation:
:TMP: Tests suite temporary directory
:TMP_CONF_DIR: Configuration directory from where the daemons that :command:`salt-runtests` starts get their
configuration files.
:TMP_CONF_MASTER_INCLUDES: Salt Master configuration files includes directory. See
:salt_conf_master:`default_include`.
:TMP_CONF_MINION_INCLUDES: Salt Minion configuration files includes directory. Seei
:salt_conf_minion:`include`.
:TMP_CONF_CLOUD_INCLUDES: Salt cloud configuration files includes directory. The same as the salt master and
minion includes configuration, though under a different directory name.
:TMP_CONF_CLOUD_PROFILE_INCLUDES: Salt cloud profiles configuration files includes directory. Same as above.
:TMP_CONF_CLOUD_PROVIDER_INCLUDES: Salt cloud providers configuration files includes directory. Same as above.
:TMP_SCRIPT_DIR: Temporary scripts directory from where the Salt CLI tools will be called when running tests.
:TMP_SALT_INTEGRATION_FILES: Temporary directory from where Salt's test suite integration files are copied to.
:TMP_BASEENV_STATE_TREE: Salt master's **base** environment state tree directory
:TMP_PRODENV_STATE_TREE: Salt master's **production** environment state tree directory
:TMP_BASEENV_PILLAR_TREE: Salt master's **base** environment pillar tree directory
:TMP_PRODENV_PILLAR_TREE: Salt master's **production** environment pillar tree directory
Use it on your test case in case of need. As simple as:
.. code-block:: python
import os
from tests.support.runtests import RUNTIME_VARS
# Path to the testing minion configuration file
minion_config_path = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'minion')
.. _`pytest`: http://pytest.org
"""
import logging
import os
import shutil
import salt.utils.path
import salt.utils.platform
import tests.support.paths as paths
try:
import pwd
except ImportError:
import salt.utils.win_functions
log = logging.getLogger(__name__)
def this_user():
    """
    Return the username that owns the current process.
    """
    if salt.utils.platform.is_windows():
        # ``pwd`` is unavailable on Windows; use the win32 helper instead.
        return salt.utils.win_functions.get_current_user(with_domain=False)
    return pwd.getpwuid(os.getuid()).pw_name
class RootsDict(dict):
    """A ``dict`` of lists that merges values per key without duplicates."""

    def merge(self, data):
        """Merge ``data`` into this mapping, appending only unseen values.

        Returns ``self`` to allow chaining.
        """
        for key, values in data.items():
            if key in self:
                existing = self[key]
                for value in values:
                    if value not in existing:
                        existing.append(value)
            else:
                self[key] = values
        return self

    def to_dict(self):
        """Return a plain ``dict`` shallow copy of this mapping."""
        return dict(self)
def recursive_copytree(source, destination, overwrite=False):
    """
    Recursively copy the ``source`` tree into ``destination``.

    Existing destination files are only overwritten when ``overwrite`` is true
    or the source file is newer (by mtime). Missing destination directories
    are created as needed.

    :param source: directory to copy from.
    :param destination: directory to copy into (created piecewise).
    :param overwrite: when true, always copy over existing files.
    """
    for root, dirs, files in os.walk(source):
        for item in dirs:
            src_path = os.path.join(root, item)
            # Use relpath instead of ``str.replace`` so only the leading
            # ``source`` prefix is stripped (replace() would also mangle any
            # repeated occurrence of the source path deeper in the tree).
            dst_path = os.path.join(destination, os.path.relpath(src_path, source))
            if not os.path.exists(dst_path):
                log.debug("Creating directory: %s", dst_path)
                os.makedirs(dst_path)
        for item in files:
            src_path = os.path.join(root, item)
            dst_path = os.path.join(destination, os.path.relpath(src_path, source))
            if os.path.exists(dst_path) and not overwrite:
                # Keep the newer file: only copy when the source is fresher.
                if os.stat(src_path).st_mtime > os.stat(dst_path).st_mtime:
                    log.debug("Copying %s to %s", src_path, dst_path)
                    shutil.copy2(src_path, dst_path)
            else:
                if not os.path.isdir(os.path.dirname(dst_path)):
                    log.debug("Creating directory: %s", os.path.dirname(dst_path))
                    os.makedirs(os.path.dirname(dst_path))
                log.debug("Copying %s to %s", src_path, dst_path)
                shutil.copy2(src_path, dst_path)
class RuntimeVars:
    # Mapping-backed attribute bag: values live in ``_vars`` and are exposed
    # as attributes via ``__getattribute__``. Once ``lock()`` is called the
    # contents are frozen and further attribute assignment raises.
    # Names listed here are real instance attributes, NOT entries in ``_vars``
    # (``__setattr__`` routes them to ``object.__setattr__``).
    __self_attributes__ = ("_vars", "_locked", "lock")
    def __init__(self, **kwargs):
        # All keyword arguments become the initial runtime variables.
        self._vars = kwargs
        self._locked = False
    def lock(self):
        """Freeze the stored variables; no further mutation is allowed."""
        # Late import
        from salt.utils.immutabletypes import freeze
        frozen_vars = freeze(self._vars.copy())
        self._vars = frozen_vars
        self._locked = True
    def __iter__(self):
        # Iterating yields (name, value) pairs, like dict.items().
        yield from self._vars.items()
    def __getattribute__(self, name):
        # Entries in ``_vars`` shadow real attributes of the same name.
        if name in object.__getattribute__(self, "_vars"):
            return object.__getattribute__(self, "_vars")[name]
        return object.__getattribute__(self, name)
    def __setattr__(self, name, value):
        # After lock(), any assignment is an error.
        if getattr(self, "_locked", False) is True:
            raise RuntimeError(
                "After {} is locked, no additional data can be added to it".format(
                    self.__class__.__name__
                )
            )
        # Internal attributes bypass the ``_vars`` mapping.
        if name in object.__getattribute__(self, "__self_attributes__"):
            object.__setattr__(self, name, value)
            return
        self._vars[name] = value
# <---- Helper Methods -----------------------------------------------------------------------------------------------
# ----- Global Variables -------------------------------------------------------------------------------------------->
XML_OUTPUT_DIR = os.environ.get(
"SALT_XML_TEST_REPORTS_DIR", os.path.join(paths.TMP, "xml-test-reports")
)
# <---- Global Variables ---------------------------------------------------------------------------------------------
# ----- Tests Runtime Variables ------------------------------------------------------------------------------------->
RUNTIME_VARS = RuntimeVars(
TMP=paths.TMP,
SYS_TMP_DIR=paths.SYS_TMP_DIR,
FILES=paths.FILES,
CONF_DIR=paths.CONF_DIR,
PILLAR_DIR=paths.PILLAR_DIR,
ENGINES_DIR=paths.ENGINES_DIR,
LOG_HANDLERS_DIR=paths.LOG_HANDLERS_DIR,
TMP_ROOT_DIR=paths.TMP_ROOT_DIR,
TMP_CONF_DIR=paths.TMP_CONF_DIR,
TMP_MINION_CONF_DIR=paths.TMP_MINION_CONF_DIR,
TMP_CONF_MASTER_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "master.d"),
TMP_CONF_MINION_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "minion.d"),
TMP_CONF_PROXY_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "proxy.d"),
TMP_CONF_CLOUD_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "cloud.conf.d"),
TMP_CONF_CLOUD_PROFILE_INCLUDES=os.path.join(
paths.TMP_CONF_DIR, "cloud.profiles.d"
),
TMP_CONF_CLOUD_PROVIDER_INCLUDES=os.path.join(
paths.TMP_CONF_DIR, "cloud.providers.d"
),
TMP_SUB_MINION_CONF_DIR=paths.TMP_SUB_MINION_CONF_DIR,
TMP_SYNDIC_MASTER_CONF_DIR=paths.TMP_SYNDIC_MASTER_CONF_DIR,
TMP_SYNDIC_MINION_CONF_DIR=paths.TMP_SYNDIC_MINION_CONF_DIR,
TMP_SSH_CONF_DIR=paths.TMP_SSH_CONF_DIR,
TMP_SCRIPT_DIR=paths.TMP_SCRIPT_DIR,
TMP_STATE_TREE=paths.TMP_STATE_TREE,
TMP_BASEENV_STATE_TREE=paths.TMP_STATE_TREE,
TMP_PILLAR_TREE=paths.TMP_PILLAR_TREE,
TMP_BASEENV_PILLAR_TREE=paths.TMP_PILLAR_TREE,
TMP_PRODENV_STATE_TREE=paths.TMP_PRODENV_STATE_TREE,
TMP_PRODENV_PILLAR_TREE=paths.TMP_PRODENV_PILLAR_TREE,
SHELL_TRUE_PATH=salt.utils.path.which("true")
if not salt.utils.platform.is_windows()
else "cmd /c exit 0 > nul",
SHELL_FALSE_PATH=salt.utils.path.which("false")
if not salt.utils.platform.is_windows()
else "cmd /c exit 1 > nul",
RUNNING_TESTS_USER=this_user(),
RUNTIME_CONFIGS={},
CODE_DIR=paths.CODE_DIR,
SALT_CODE_DIR=paths.SALT_CODE_DIR,
BASE_FILES=paths.BASE_FILES,
PROD_FILES=paths.PROD_FILES,
TESTS_DIR=paths.TESTS_DIR,
)
# <---- Tests Runtime Variables --------------------------------------------------------------------------------------

View file

@ -0,0 +1,256 @@
"""
tests.support.sminion
~~~~~~~~~~~~~~~~~~~~~
SMinion's support functions
"""
import fnmatch
import hashlib
import logging
import os
import shutil
import sys
import salt.minion
import salt.utils.path
import salt.utils.stringutils
from tests.support.runtests import RUNTIME_VARS
log = logging.getLogger(__name__)
DEFAULT_SMINION_ID = "pytest-internal-sminion"
def build_minion_opts(
    minion_id=None,
    root_dir=None,
    initial_conf_file=None,
    minion_opts_overrides=None,
    skip_cached_opts=False,
    cache_opts=True,
    minion_role=None,
):
    """
    Build (and optionally cache) a fully-resolved minion configuration
    dictionary suitable for instantiating a testing SMinion.

    :param minion_id: minion identifier; defaults to ``DEFAULT_SMINION_ID``.
    :param root_dir: the minion root directory; derived from a hash of the
        minion id under ``RUNTIME_VARS.TMP_ROOT_DIR`` when not given.
    :param initial_conf_file: optional configuration file to seed the options
        from before applying the testing defaults.
    :param minion_opts_overrides: mapping applied on top of the generated
        options just before they are written to disk.
    :param skip_cached_opts: when false, a previously built configuration for
        the same ``minion_id`` is returned from the per-function cache.
    :param cache_opts: when true, the resulting options are stored in the
        per-function cache keyed by ``minion_id``.
    :param minion_role: optional value stored as the ``role`` grain.
    :return: the loaded minion options dictionary.
    """
    if minion_id is None:
        minion_id = DEFAULT_SMINION_ID
    if skip_cached_opts is False:
        # The cache lives as an attribute on the function itself.
        try:
            opts_cache = build_minion_opts.__cached_opts__
        except AttributeError:
            opts_cache = build_minion_opts.__cached_opts__ = {}
        cached_opts = opts_cache.get(minion_id)
        if cached_opts:
            return cached_opts
    log.info("Generating testing minion %r configuration...", minion_id)
    if root_dir is None:
        # Derive a short, stable directory name from the minion id.
        hashed_minion_id = hashlib.sha1()
        hashed_minion_id.update(salt.utils.stringutils.to_bytes(minion_id))
        root_dir = os.path.join(
            RUNTIME_VARS.TMP_ROOT_DIR, hashed_minion_id.hexdigest()[:6]
        )
    if initial_conf_file is not None:
        minion_opts = salt.config._read_conf_file(
            initial_conf_file
        ) # pylint: disable=protected-access
    else:
        minion_opts = {}
    conf_dir = os.path.join(root_dir, "conf")
    conf_file = os.path.join(conf_dir, "minion")
    minion_opts["id"] = minion_id
    minion_opts["conf_file"] = conf_file
    minion_opts["root_dir"] = root_dir
    minion_opts["cachedir"] = "cache"
    minion_opts["user"] = RUNTIME_VARS.RUNNING_TESTS_USER
    minion_opts["pki_dir"] = "pki"
    minion_opts["hosts.file"] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "hosts")
    minion_opts["aliases.file"] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "aliases")
    minion_opts["file_client"] = "local"
    minion_opts["server_id_use_crc"] = "adler32"
    minion_opts["pillar_roots"] = {"base": [RUNTIME_VARS.TMP_PILLAR_TREE]}
    minion_opts["file_roots"] = {
        "base": [
            # Let's support runtime created files that can be used like:
            # salt://my-temp-file.txt
            RUNTIME_VARS.TMP_STATE_TREE
        ],
        # Alternate root to test __env__ choices
        "prod": [
            os.path.join(RUNTIME_VARS.FILES, "file", "prod"),
            RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
        ],
    }
    if initial_conf_file and initial_conf_file.startswith(RUNTIME_VARS.FILES):
        # We assume we were passed a minion configuration file defined for testing and, as such
        # we define the file and pillar roots to include the testing states/pillar trees
        minion_opts["pillar_roots"]["base"].append(
            os.path.join(RUNTIME_VARS.FILES, "pillar", "base"),
        )
        minion_opts["file_roots"]["base"].append(
            os.path.join(RUNTIME_VARS.FILES, "file", "base"),
        )
        minion_opts["file_roots"]["prod"].append(
            os.path.join(RUNTIME_VARS.FILES, "file", "prod"),
        )
    # We need to copy the extension modules into the new master root_dir or
    # it will be prefixed by it
    extension_modules_path = os.path.join(root_dir, "extension_modules")
    if not os.path.exists(extension_modules_path):
        shutil.copytree(
            os.path.join(RUNTIME_VARS.FILES, "extension_modules"),
            extension_modules_path,
        )
    minion_opts["extension_modules"] = extension_modules_path
    # Custom grains
    if "grains" not in minion_opts:
        minion_opts["grains"] = {}
    if minion_role is not None:
        minion_opts["grains"]["role"] = minion_role
    # Under windows we can't seem to properly create a virtualenv off of another
    # virtualenv, we can on linux but we will still point to the virtualenv binary
    # outside the virtualenv running the test suite, if that's the case.
    try:
        real_prefix = sys.real_prefix
        # The above attribute exists, this is a virtualenv
        if salt.utils.platform.is_windows():
            virtualenv_binary = os.path.join(real_prefix, "Scripts", "virtualenv.exe")
        else:
            # We need to remove the virtualenv from PATH or we'll get the virtualenv binary
            # from within the virtualenv, we don't want that
            path = os.environ.get("PATH")
            if path is not None:
                path_items = path.split(os.pathsep)
                for item in path_items[:]:
                    if item.startswith(sys.base_prefix):
                        path_items.remove(item)
                os.environ["PATH"] = os.pathsep.join(path_items)
            virtualenv_binary = salt.utils.path.which("virtualenv")
            if path is not None:
                # Restore previous environ PATH
                os.environ["PATH"] = path
            if not virtualenv_binary.startswith(real_prefix):
                virtualenv_binary = None
        if virtualenv_binary and not os.path.exists(virtualenv_binary):
            # It doesn't exist?!
            virtualenv_binary = None
    except AttributeError:
        # We're not running inside a virtualenv
        virtualenv_binary = None
    if virtualenv_binary:
        minion_opts["venv_bin"] = virtualenv_binary
    # Override minion_opts with minion_opts_overrides
    if minion_opts_overrides:
        minion_opts.update(minion_opts_overrides)
    if not os.path.exists(conf_dir):
        os.makedirs(conf_dir)
    # Persist the generated options, then reload them through the normal
    # minion_config() path so defaults and relative paths are resolved.
    with salt.utils.files.fopen(conf_file, "w") as fp_:
        salt.utils.yaml.safe_dump(minion_opts, fp_, default_flow_style=False)
    log.info("Generating testing minion %r configuration completed.", minion_id)
    minion_opts = salt.config.minion_config(
        conf_file, minion_id=minion_id, cache_minion_id=True
    )
    salt.utils.verify.verify_env(
        [
            os.path.join(minion_opts["pki_dir"], "accepted"),
            os.path.join(minion_opts["pki_dir"], "rejected"),
            os.path.join(minion_opts["pki_dir"], "pending"),
            os.path.dirname(minion_opts["log_file"]),
            minion_opts["extension_modules"],
            minion_opts["cachedir"],
            minion_opts["sock_dir"],
            RUNTIME_VARS.TMP_STATE_TREE,
            RUNTIME_VARS.TMP_PILLAR_TREE,
            RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
            RUNTIME_VARS.TMP,
        ],
        RUNTIME_VARS.RUNNING_TESTS_USER,
        root_dir=root_dir,
    )
    if cache_opts:
        try:
            opts_cache = build_minion_opts.__cached_opts__
        except AttributeError:
            opts_cache = build_minion_opts.__cached_opts__ = {}
        opts_cache[minion_id] = minion_opts
    return minion_opts
def create_sminion(
    minion_id=None,
    root_dir=None,
    initial_conf_file=None,
    sminion_cls=salt.minion.SMinion,
    minion_opts_overrides=None,
    skip_cached_minion=False,
    cache_sminion=True,
):
    """
    Instantiate (and optionally cache) a testing SMinion.

    :param minion_id: minion identifier; defaults to ``DEFAULT_SMINION_ID``.
    :param root_dir: forwarded to :func:`build_minion_opts`.
    :param initial_conf_file: forwarded to :func:`build_minion_opts`.
    :param sminion_cls: class to instantiate with the built options.
    :param minion_opts_overrides: forwarded to :func:`build_minion_opts`.
    :param skip_cached_minion: when false, a previously created minion for the
        same ``minion_id`` is returned from the per-function cache.
    :param cache_sminion: when true, the created minion is stored in the
        per-function cache keyed by ``minion_id``.
    :return: the ``sminion_cls`` instance.
    """
    if minion_id is None:
        minion_id = DEFAULT_SMINION_ID
    if skip_cached_minion is False:
        # The cache lives as an attribute on the function itself.
        try:
            minions_cache = create_sminion.__cached_minions__
        except AttributeError:
            create_sminion.__cached_minions__ = {}
        cached_minion = create_sminion.__cached_minions__.get(minion_id)
        if cached_minion:
            return cached_minion
    # Skipping the cached minion also skips the cached options, so the
    # configuration is rebuilt from scratch in that case.
    minion_opts = build_minion_opts(
        minion_id=minion_id,
        root_dir=root_dir,
        initial_conf_file=initial_conf_file,
        minion_opts_overrides=minion_opts_overrides,
        skip_cached_opts=skip_cached_minion,
        cache_opts=cache_sminion,
    )
    log.info("Instantiating a testing %s(%s)", sminion_cls.__name__, minion_id)
    sminion = sminion_cls(minion_opts)
    if cache_sminion:
        try:
            minions_cache = create_sminion.__cached_minions__
        except AttributeError:
            minions_cache = create_sminion.__cached_minions__ = {}
        minions_cache[minion_id] = sminion
    return sminion
def check_required_sminion_attributes(sminion_attr, required_items):
    """
    :param sminion_attr: The name of the sminion attribute to check, such as 'functions' or 'states'
    :param required_items: The items that must be part of the designated sminion attribute for the decorated test
    :return The packages that are not available
    """
    required_salt_items = set(required_items)
    sminion = create_sminion(minion_id=DEFAULT_SMINION_ID)
    available_items = list(getattr(sminion, sminion_attr))
    not_available_items = set()
    # Previously-detected missing items are cached on the sminion instance so
    # repeated checks don't re-scan the loader.
    name = "__not_available_{items}s__".format(items=sminion_attr)
    if not hasattr(sminion, name):
        setattr(sminion, name, set())
    cached_not_available_items = getattr(sminion, name)
    for not_available_item in cached_not_available_items:
        if not_available_item in required_salt_items:
            not_available_items.add(not_available_item)
            required_salt_items.remove(not_available_item)
    for required_item_name in required_salt_items:
        search_name = required_item_name
        # A bare module name (no dot) matches any of its functions/states.
        if "." not in search_name:
            search_name += ".*"
        if not fnmatch.filter(available_items, search_name):
            not_available_items.add(required_item_name)
            cached_not_available_items.add(required_item_name)
    return not_available_items

View file

@ -360,7 +360,6 @@ $modules = "acme",
"runit",
"s6",
"scsi",
"seed",
"sensors",
"service",
"shadow",

View file

@ -140,9 +140,3 @@ del __define_global_system_encoding_variable__
import salt._logging # isort:skip
# pylint: enable=unused-import
# When we are running in a 'onedir' environment, setup the path for user
# installed packages.
if hasattr(sys, "RELENV"):
sys.path.insert(0, str(sys.RELENV / "extras-{}.{}".format(*sys.version_info)))

View file

@ -16,4 +16,7 @@ class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
self.parse_args()
ssh = salt.client.ssh.SSH(self.config)
ssh.run()
try:
ssh.run()
finally:
ssh.fsclient.destroy()

View file

@ -1427,7 +1427,8 @@ class Cloud:
raise SaltCloudSystemExit("Failed to deploy VM")
continue
if self.opts.get("show_deploy_args", False) is False:
ret[name].pop("deploy_kwargs", None)
if isinstance(ret[name], dict):
ret[name].pop("deploy_kwargs", None)
except (SaltCloudSystemExit, SaltCloudConfigError) as exc:
if len(names) == 1:
raise

View file

@ -231,10 +231,11 @@ def post_master_init(self, master):
}
},
persist=True,
fire_event=False,
)
log.info("Added mine.update to scheduler")
else:
self.schedule.delete_job("__mine_interval", persist=True)
self.schedule.delete_job("__mine_interval", persist=True, fire_event=False)
# add master_alive job if enabled
if self.opts["transport"] != "tcp" and self.opts["master_alive_interval"] > 0:
@ -250,6 +251,7 @@ def post_master_init(self, master):
}
},
persist=True,
fire_event=False,
)
if (
self.opts["master_failback"]
@ -268,18 +270,24 @@ def post_master_init(self, master):
}
},
persist=True,
fire_event=False,
)
else:
self.schedule.delete_job(
salt.minion.master_event(type="failback"), persist=True
salt.minion.master_event(type="failback"),
persist=True,
fire_event=False,
)
else:
self.schedule.delete_job(
salt.minion.master_event(type="alive", master=self.opts["master"]),
persist=True,
fire_event=False,
)
self.schedule.delete_job(
salt.minion.master_event(type="failback"), persist=True
salt.minion.master_event(type="failback"),
persist=True,
fire_event=False,
)
# proxy keepalive
@ -304,10 +312,15 @@ def post_master_init(self, master):
}
},
persist=True,
fire_event=False,
)
self.schedule.enable_schedule()
self.schedule.enable_schedule(fire_event=False)
else:
self.schedule.delete_job("__proxy_keepalive", persist=True)
self.schedule.delete_job(
"__proxy_keepalive",
persist=True,
fire_event=False,
)
# Sync the grains here so the proxy can communicate them to the master
self.functions["saltutil.sync_grains"](saltenv="base")
@ -321,10 +334,11 @@ def post_master_init(self, master):
self.proxy_context = {}
self.add_periodic_callback("cleanup", self.cleanup_subprocesses)
_failed = list()
if self.opts["proxy"].get("parallel_startup"):
log.debug("Initiating parallel startup for proxies")
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = [
futures = {
executor.submit(
subproxy_post_master_init,
_id,
@ -332,12 +346,22 @@ def post_master_init(self, master):
self.opts,
self.proxy,
self.utils,
)
): _id
for _id in self.opts["proxy"].get("ids", [])
]
}
for f in concurrent.futures.as_completed(futures):
sub_proxy_data = f.result()
for future in concurrent.futures.as_completed(futures):
try:
sub_proxy_data = future.result()
except Exception as exc: # pylint: disable=broad-except
_id = futures[future]
                        log.info(
                            "An exception occurred during initialization for %s, skipping: %s",
                            _id,
                            exc,
                        )
_failed.append(_id)
continue
minion_id = sub_proxy_data["proxy_opts"].get("id")
if sub_proxy_data["proxy_minion"]:
@ -347,16 +371,24 @@ def post_master_init(self, master):
if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]:
self.deltaproxy_objs[
minion_id
].req_channel = salt.transport.client.AsyncReqChannel.factory(
].req_channel = salt.channel.client.AsyncReqChannel.factory(
sub_proxy_data["proxy_opts"], io_loop=self.io_loop
)
else:
log.debug("Initiating non-parallel startup for proxies")
for _id in self.opts["proxy"].get("ids", []):
sub_proxy_data = subproxy_post_master_init(
_id, uid, self.opts, self.proxy, self.utils
)
try:
sub_proxy_data = subproxy_post_master_init(
_id, uid, self.opts, self.proxy, self.utils
)
except Exception as exc: # pylint: disable=broad-except
                    log.info(
                        "An exception occurred during initialization for %s, skipping: %s",
                        _id,
                        exc,
                    )
_failed.append(_id)
continue
minion_id = sub_proxy_data["proxy_opts"].get("id")
if sub_proxy_data["proxy_minion"]:
@ -366,10 +398,12 @@ def post_master_init(self, master):
if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]:
self.deltaproxy_objs[
minion_id
].req_channel = salt.transport.client.AsyncReqChannel.factory(
].req_channel = salt.channel.client.AsyncReqChannel.factory(
sub_proxy_data["proxy_opts"], io_loop=self.io_loop
)
if _failed:
log.info("Following sub proxies failed %s", _failed)
self.ready = True
@ -535,10 +569,13 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils):
}
},
persist=True,
fire_event=False,
)
_proxy_minion.schedule.enable_schedule()
_proxy_minion.schedule.enable_schedule(fire_event=False)
else:
_proxy_minion.schedule.delete_job("__proxy_keepalive", persist=True)
_proxy_minion.schedule.delete_job(
"__proxy_keepalive", persist=True, fire_event=False
)
return {"proxy_minion": _proxy_minion, "proxy_opts": proxyopts}

View file

@ -1363,7 +1363,7 @@ class Minion(MinionBase):
)
# a long-running req channel
self.req_channel = salt.transport.client.AsyncReqChannel.factory(
self.req_channel = salt.channel.client.AsyncReqChannel.factory(
self.opts, io_loop=self.io_loop
)
@ -2817,10 +2817,8 @@ class Minion(MinionBase):
self.opts["master"],
)
self.req_channel = (
salt.transport.client.AsyncReqChannel.factory(
self.opts, io_loop=self.io_loop
)
self.req_channel = salt.channel.client.AsyncReqChannel.factory(
self.opts, io_loop=self.io_loop
)
# put the current schedule into the new loaders

View file

@ -381,6 +381,9 @@ def refresh_grains(**kwargs):
refresh_pillar : True
Set to ``False`` to keep pillar data from being refreshed.
clean_pillar_cache : False
Set to ``True`` to refresh pillar cache.
CLI Examples:
.. code-block:: bash
@ -389,6 +392,7 @@ def refresh_grains(**kwargs):
"""
kwargs = salt.utils.args.clean_kwargs(**kwargs)
_refresh_pillar = kwargs.pop("refresh_pillar", True)
clean_pillar_cache = kwargs.pop("clean_pillar_cache", False)
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
# Modules and pillar need to be refreshed in case grains changes affected
@ -396,14 +400,18 @@ def refresh_grains(**kwargs):
# newly-reloaded grains to each execution module's __grains__ dunder.
if _refresh_pillar:
# we don't need to call refresh_modules here because it's done by refresh_pillar
refresh_pillar()
refresh_pillar(clean_cache=clean_pillar_cache)
else:
refresh_modules()
return True
def sync_grains(
saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None
saltenv=None,
refresh=True,
extmod_whitelist=None,
extmod_blacklist=None,
clean_pillar_cache=False,
):
"""
.. versionadded:: 0.10.0
@ -430,6 +438,9 @@ def sync_grains(
extmod_blacklist : None
comma-separated list of modules to blacklist based on type
clean_pillar_cache : False
Set to ``True`` to refresh pillar cache.
CLI Examples:
.. code-block:: bash
@ -441,7 +452,7 @@ def sync_grains(
ret = _sync("grains", saltenv, extmod_whitelist, extmod_blacklist)
if refresh:
# we don't need to call refresh_modules here because it's done by refresh_pillar
refresh_pillar()
refresh_pillar(clean_cache=clean_pillar_cache)
return ret
@ -915,7 +926,11 @@ def sync_log_handlers(
def sync_pillar(
saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None
saltenv=None,
refresh=True,
extmod_whitelist=None,
extmod_blacklist=None,
clean_pillar_cache=False,
):
"""
.. versionadded:: 2015.8.11,2016.3.2
@ -935,6 +950,9 @@ def sync_pillar(
extmod_blacklist : None
comma-separated list of modules to blacklist based on type
clean_pillar_cache : False
Set to ``True`` to refresh pillar cache.
.. note::
This function will raise an error if executed on a traditional (i.e.
not masterless) minion
@ -953,7 +971,7 @@ def sync_pillar(
ret = _sync("pillar", saltenv, extmod_whitelist, extmod_blacklist)
if refresh:
# we don't need to call refresh_modules here because it's done by refresh_pillar
refresh_pillar()
refresh_pillar(clean_cache=clean_pillar_cache)
return ret
@ -998,7 +1016,13 @@ def sync_executors(
return ret
def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None):
def sync_all(
saltenv=None,
refresh=True,
extmod_whitelist=None,
extmod_blacklist=None,
clean_pillar_cache=False,
):
"""
.. versionchanged:: 2015.8.11,2016.3.2
On masterless minions, pillar modules are now synced, and refreshed
@ -1036,6 +1060,9 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist
extmod_blacklist : None
dictionary of modules to blacklist based on type
clean_pillar_cache : False
Set to ``True`` to refresh pillar cache.
CLI Examples:
.. code-block:: bash
@ -1080,7 +1107,7 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist
ret["pillar"] = sync_pillar(saltenv, False, extmod_whitelist, extmod_blacklist)
if refresh:
# we don't need to call refresh_modules here because it's done by refresh_pillar
refresh_pillar()
refresh_pillar(clean_cache=clean_pillar_cache)
return ret

View file

@ -381,7 +381,7 @@ def set_value(
else:
pol_data[key] = {v_name: {"data": v_data, "type": v_type}}
write_reg_pol(pol_data)
write_reg_pol(pol_data, policy_class=policy_class)
return salt.utils.win_reg.set_value(
hive=hive,
@ -464,7 +464,7 @@ def disable_value(key, v_name, policy_class="machine"):
else:
pol_data[key] = {"**del.{}".format(v_name): {"data": " ", "type": "REG_SZ"}}
write_reg_pol(pol_data)
write_reg_pol(pol_data, policy_class=policy_class)
return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
@ -534,7 +534,7 @@ def delete_value(key, v_name, policy_class="Machine"):
else:
return None
write_reg_pol(pol_data)
write_reg_pol(pol_data, policy_class=policy_class)
return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)

View file

@ -92,7 +92,9 @@ def _handle_old_salt_host_resource(resource):
ret[MINION_ID] = attrs.get(MINION_ID)
valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys())
for attr in valid_attrs:
ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr))
ret[attr] = _cast_output_to_type(
attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr)
)
return ret
@ -110,7 +112,9 @@ def _handle_new_salt_host_resource(resource):
ret[MINION_ID] = attrs.get(MINION_ID)
valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys())
for attr in valid_attrs:
ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr))
ret[attr] = _cast_output_to_type(
attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr)
)
log.info(ret)
rets.append(ret)
return rets
@ -134,8 +138,16 @@ def _add_ssh_key(ret):
ret["priv"] = priv
def _cast_output_to_type(value, typ):
def _cast_output_to_type(attr, value, typ):
"""cast the value depending on the terraform type"""
if value is None:
# Timeout needs to default to 0 if the value is None
# The ssh command that is run cannot handle `-o ConnectTimeout=None`
if attr == "timeout":
return 0
else:
return value
if value is None:
return value
if typ == "b":

View file

@ -334,6 +334,15 @@ __func_alias__ = {
}
def _http_ftp_check(source):
"""
Check if source or sources is http, https or ftp.
"""
if isinstance(source, str):
return source.lower().startswith(("http:", "https:", "ftp:"))
return any([s.lower().startswith(("http:", "https:", "ftp:")) for s in source])
def _get_accumulator_filepath():
"""
Return accumulator data path.
@ -2414,6 +2423,8 @@ def managed(
- source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz
- source_hash: md5=79eef25f9b0b2c642c62b7f737d4f53f
source_hash is ignored if the file hosted is not on a HTTP, HTTPS or FTP server.
Known issues:
If the remote server URL has the hash file as an apparent
sub-directory of the source file, the module will discover that it
@ -2946,6 +2957,9 @@ def managed(
"'contents_grains' is permitted",
)
if source is not None and not _http_ftp_check(source) and source_hash:
log.warning("source_hash is only used with 'http', 'https' or 'ftp'")
# If no source is specified, set replace to False, as there is nothing
# with which to replace the file.
if not source and contents_count == 0 and replace:
@ -5998,6 +6012,9 @@ def blockreplace(
if not name:
return _error(ret, "Must provide name to file.blockreplace")
if source is not None and not _http_ftp_check(source) and source_hash:
log.warning("source_hash is only used with 'http', 'https' or 'ftp'")
if sources is None:
sources = []
if source_hashes is None:
@ -6434,6 +6451,9 @@ def append(
if not name:
return _error(ret, "Must provide name to file.append")
if source is not None and not _http_ftp_check(source) and source_hash:
log.warning("source_hash is only used with 'http', 'https' or 'ftp'")
name = os.path.expanduser(name)
if sources is None:
@ -6718,6 +6738,9 @@ def prepend(
if not name:
return _error(ret, "Must provide name to file.prepend")
if source is not None and not _http_ftp_check(source) and source_hash:
log.warning("source_hash is only used with 'http', 'https' or 'ftp'")
if sources is None:
sources = []
@ -8937,6 +8960,25 @@ def cached(
else:
source_sum = {}
if __opts__["test"]:
local_copy = __salt__["cp.is_cached"](name, saltenv=saltenv)
if local_copy:
if source_sum:
hash = __salt__["file.get_hash"](local_copy, __opts__["hash_type"])
if hash == source_sum["hsum"]:
ret["comment"] = "File already cached: {}".format(name)
else:
ret[
"comment"
] = "Hashes don't match.\nFile will be cached: {}".format(name)
else:
ret["comment"] = "No hash found. File will be cached: {}".format(name)
else:
ret["comment"] = "File will be cached: {}".format(name)
ret["changes"] = {}
ret["result"] = None
return ret
if parsed.scheme in salt.utils.files.LOCAL_PROTOS:
# Source is a local file path
full_path = os.path.realpath(os.path.expanduser(parsed.path))

View file

@ -72,6 +72,27 @@ def __virtual__():
return __virtualname__
def _get_current(key, name, policy_class):
"""
Helper function to get the current state of the policy
"""
hive = "HKLM"
if policy_class == "User":
hive = "HKCU"
pol = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
)
reg_raw = __utils__["reg.read_value"](hive=hive, key=key, vname=name)
reg = {}
if reg_raw["vdata"] is not None:
reg["data"] = reg_raw["vdata"]
if reg_raw["vtype"] is not None:
reg["type"] = reg_raw["vtype"]
return {"pol": pol, "reg": reg}
def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine"):
r"""
Ensure a registry setting is present in the Registry.pol file.
@ -128,16 +149,29 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine")
"""
ret = {"name": name, "changes": {}, "result": False, "comment": ""}
old = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
old = _get_current(key=key, name=name, policy_class=policy_class)
pol_correct = (
str(old["pol"].get("data", "")) == str(v_data)
and old["pol"].get("type", "") == v_type
)
if old.get("data", "") == v_data and old.get("type", "") == v_type:
ret["comment"] = "Registry.pol value already present"
reg_correct = (
str(old["reg"].get("data", "")) == str(v_data)
and old["reg"].get("type", "") == v_type
)
if pol_correct and reg_correct:
ret["comment"] = "Policy value already present\nRegistry value already present"
ret["result"] = True
return ret
if __opts__["test"]:
ret["comment"] = "Registry.pol value will be set"
if not pol_correct:
ret["comment"] = "Policy value will be set"
if not reg_correct:
if ret["comment"]:
ret["comment"] += "\n"
ret["comment"] += "Registry value will be set"
ret["result"] = None
return ret
@ -149,15 +183,24 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine")
policy_class=policy_class,
)
new = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
new = _get_current(key=key, name=name, policy_class=policy_class)
pol_correct = (
str(new["pol"]["data"]) == str(v_data) and new["pol"]["type"] == v_type
)
reg_correct = (
str(new["reg"]["data"]) == str(v_data) and new["reg"]["type"] == v_type
)
if str(new["data"]) == v_data and new["type"] == v_type:
ret["comment"] = "Registry.pol value has been set"
if pol_correct and reg_correct:
ret["comment"] = "Registry policy value has been set"
ret["result"] = True
else:
ret["comment"] = "Failed to set Registry.pol value"
elif not pol_correct:
ret["comment"] = "Failed to set policy value"
elif not reg_correct:
if ret["comment"]:
ret["comment"] += "\n"
ret["comment"] += "Failed to set registry value"
changes = salt.utils.data.recursive_diff(old, new)
@ -206,30 +249,42 @@ def value_disabled(name, key, policy_class="Machine"):
"""
ret = {"name": name, "changes": {}, "result": False, "comment": ""}
old = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
)
if old.get("data", "") == "**del.{}".format(name):
ret["comment"] = "Registry.pol value already disabled"
old = _get_current(key=key, name=name, policy_class=policy_class)
pol_correct = old["pol"].get("data", "") == "**del.{}".format(name)
reg_correct = old["reg"] == {}
if pol_correct and reg_correct:
ret["comment"] = "Registry policy value already disabled"
ret["result"] = True
return ret
if __opts__["test"]:
ret["comment"] = "Registry.pol value will be disabled"
if not pol_correct:
ret["comment"] = "Policy value will be disabled"
if not reg_correct:
if ret["comment"]:
ret["comment"] += "\n"
ret["comment"] += "Registry value will be removed"
ret["result"] = None
return ret
__salt__["lgpo_reg.disable_value"](key=key, v_name=name, policy_class=policy_class)
new = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
)
new = _get_current(key=key, name=name, policy_class=policy_class)
if "**del." in str(new["data"]) and new["type"] == "REG_SZ":
ret["comment"] = "Registry.pol value disabled"
pol_correct = new["pol"].get("data", "") == "**del.{}".format(name)
reg_correct = new["reg"] == {}
if pol_correct and reg_correct:
ret["comment"] = "Registry policy value disabled"
ret["result"] = True
else:
ret["comment"] = "Failed to disable Registry.pol value"
elif not pol_correct:
ret["comment"] = "Failed to disable policy value"
elif not reg_correct:
if ret["comment"]:
ret["comment"] += "\n"
ret["comment"] += "Failed to remove registry value"
changes = salt.utils.data.recursive_diff(old, new)
@ -278,32 +333,42 @@ def value_absent(name, key, policy_class="Machine"):
"""
ret = {"name": name, "changes": {}, "result": False, "comment": ""}
old = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
)
if not old:
ret["comment"] = "Registry.pol value already absent"
old = _get_current(key=key, name=name, policy_class=policy_class)
pol_correct = old["pol"] == {}
reg_correct = old["reg"] == {}
if pol_correct and reg_correct:
ret["comment"] = "Registry policy value already deleted"
ret["result"] = True
return ret
if __opts__["test"]:
ret["comment"] = "Registry.pol value will be deleted"
if not pol_correct:
ret["comment"] = "Policy value will be deleted"
if not reg_correct:
if ret["comment"]:
ret["comment"] += "\n"
ret["comment"] += "Registry value will be deleted"
ret["result"] = None
return ret
__salt__["lgpo_reg.delete_value"](key=key, v_name=name, policy_class=policy_class)
new = __salt__["lgpo_reg.get_value"](
key=key, v_name=name, policy_class=policy_class
)
new = _get_current(key=key, name=name, policy_class=policy_class)
if not new:
ret["comment"] = "Registry.pol value deleted"
pol_correct = new["pol"] == {}
reg_correct = new["reg"] == {}
if pol_correct and reg_correct:
ret["comment"] = "Registry policy value deleted"
ret["result"] = True
# We're setting this here in case new is None
new = {}
else:
ret["comment"] = "Failed to delete Registry.pol value"
elif not pol_correct:
ret["comment"] = "Failed to delete policy value"
elif not reg_correct:
if ret["comment"]:
ret["comment"] += "\n"
ret["comment"] += "Failed to delete registry value"
changes = salt.utils.data.recursive_diff(old, new)

View file

@ -13,8 +13,6 @@ from salt.utils.versions import warn_until
log = logging.getLogger(__name__)
# XXX: Add depreication warnings to start using salt.channel.client
class ReqChannel:
"""

View file

@ -13,7 +13,6 @@ import salt.ext.tornado.concurrent
import salt.ext.tornado.gen
import salt.ext.tornado.ioloop
import salt.ext.tornado.netutil
import salt.transport.client
import salt.transport.frame
import salt.utils.msgpack
from salt.ext.tornado.ioloop import IOLoop

Some files were not shown because too many files have changed in this diff Show more