Merge pull request #64002 from s0undt3ch/hotfix/merge-forward

[master] Merge 3006.x into master
This commit is contained in:
Pedro Algarvio 2023-04-01 09:40:18 +01:00 committed by GitHub
commit 70d6164770
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
51 changed files with 1586 additions and 553 deletions

View file

@@ -25,16 +25,19 @@ runs:
steps:
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}.patch
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
shell: bash
run: |
tools pkg configure-git
- name: Apply Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
shell: bash
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete

View file

@@ -22,7 +22,7 @@ inputs:
required: false
type: string
description: The version of relenv to use
default: 0.10.0
default: 0.10.1
outputs:
version:

View file

@@ -44,6 +44,7 @@ jobs:
path: pkgs/checkout/artifacts/
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}.patch
@@ -62,11 +63,13 @@ jobs:
cwd: pkgs/checkout/
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
working-directory: pkgs/checkout/
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
working-directory: pkgs/checkout/
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete

View file

@@ -35,6 +35,7 @@ jobs:
- uses: actions/checkout@v3
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}.patch
@@ -50,10 +51,12 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete

View file

@@ -39,6 +39,7 @@ jobs:
path: artifacts/
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}.patch
@@ -53,10 +54,12 @@ jobs:
salt-version: "${{ inputs.salt-version }}"
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete

View file

@@ -2,14 +2,14 @@
# Instead, edit the template '.github/workflows/templates/ci.yml.jinja'
---
name: CI
run-name: "CI (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('branch: {0}', github.ref_name) }})"
run-name: "CI (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('{0}: {1}', startsWith(github.event.ref, 'refs/tags') && 'tag' || 'branch', github.ref_name) }})"
on:
push: {}
pull_request: {}
env:
COLUMNS: 190
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
permissions:
@@ -256,28 +256,30 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
# TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning
# issues on pre-3006 development versions on deb-based distros.
- name: Update Debian changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}"
tools changelog update-deb --draft
tools changelog update-deb
- name: Update RPM changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-rpm --draft
tools changelog update-rpm
- name: Update Release Notes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-release-notes --draft
tools changelog update-release-notes
- name: Generate MAN Pages
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}"
SALT_ON_SALTSTACK: "1"
@@ -286,22 +288,26 @@ jobs:
- name: Update Changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-changelog-md --draft
tools changelog update-changelog-md
- name: Show Changes Diff
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git diff --color
- name: Configure Git
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
- name: Setup Pre-Commit
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: ./.github/actions/setup-pre-commit
with:
version: "3.0.4"
@@ -309,6 +315,7 @@ jobs:
- name: Commit Changes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
SKIP: lint-salt,lint-tests
run: |
@@ -318,11 +325,13 @@ jobs:
- name: Create release changes patch
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
- name: Upload Changes Diff Artifact
uses: actions/upload-artifact@v3
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
@@ -393,7 +402,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"
@@ -411,7 +420,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"

View file

@@ -13,7 +13,7 @@ on:
env:
COLUMNS: 190
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
permissions:
@@ -300,28 +300,30 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
# TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning
# issues on pre-3006 development versions on deb-based distros.
- name: Update Debian changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}"
tools changelog update-deb --draft
tools changelog update-deb
- name: Update RPM changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-rpm --draft
tools changelog update-rpm
- name: Update Release Notes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-release-notes --draft
tools changelog update-release-notes
- name: Generate MAN Pages
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}"
SALT_ON_SALTSTACK: "1"
@@ -330,22 +332,26 @@ jobs:
- name: Update Changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-changelog-md --draft
tools changelog update-changelog-md
- name: Show Changes Diff
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git diff --color
- name: Configure Git
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
- name: Setup Pre-Commit
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: ./.github/actions/setup-pre-commit
with:
version: "3.0.4"
@@ -353,6 +359,7 @@ jobs:
- name: Commit Changes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
SKIP: lint-salt,lint-tests
run: |
@@ -362,11 +369,13 @@ jobs:
- name: Create release changes patch
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
- name: Upload Changes Diff Artifact
uses: actions/upload-artifact@v3
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
@@ -437,7 +446,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"
@@ -455,7 +464,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"
@@ -1302,7 +1311,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Standalone Repository As An Artifact
@@ -1423,7 +1432,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1550,9 +1559,12 @@ jobs:
mkdir -p artifacts/pkgs/repo
- name: Create Repository
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1647,7 +1659,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
@@ -1723,7 +1735,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
@@ -1835,7 +1847,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
@@ -1915,6 +1927,9 @@ jobs:
tree -a artifacts/pkgs/repo/
- name: Upload Repository Contents (nightly)
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo publish nightly --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/

View file

@@ -17,9 +17,8 @@ on:
env:
COLUMNS: 190
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
REPO_BASE_URL: "https://${{ secrets.SALT_REPO_DOMAIN }}"
permissions:
contents: write # To be able to publish the release
@@ -85,6 +84,8 @@ jobs:
validate-version: true
- name: Check Existing Releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
@@ -107,12 +108,17 @@ jobs:
- name: Clone The Salt Repository
uses: actions/checkout@v3
- name: Setup Rclone
uses: AnimMouse/setup-rclone@v1
with:
version: v1.61.1
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Backup Previous Releases
run: |
tools pkg repo backup-previous-releases --salt-version=${{ needs.prepare-workflow.outputs.salt-version }}
tools pkg repo backup-previous-releases
publish-repositories:
name: Publish Repositories
@@ -139,9 +145,60 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
- name: Publish Release Repository
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
test-linux-pkg-downloads:
name: Test Linux Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-latest
platform: linux
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
artifacts-from-workflow: staging.yml
secrets: inherit
test-macos-pkg-downloads:
name: Test macOS Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-macos.yml
with:
distro-slug: macos-12
platform: darwin
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
artifacts-from-workflow: staging.yml
secrets: inherit
test-windows-pkg-downloads:
name: Test Windows Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-2022
platform: windows
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
artifacts-from-workflow: staging.yml
secrets: inherit
release:
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
runs-on:
@@ -152,6 +209,9 @@ jobs:
- prepare-workflow
- backup
- publish-repositories
- test-linux-pkg-downloads
- test-macos-pkg-downloads
- test-windows-pkg-downloads
environment: release
steps:
- name: Clone The Salt Repository
@@ -238,12 +298,97 @@ jobs:
replacesArtifacts: true
tag: v${{ needs.prepare-workflow.outputs.salt-version }}
- name: Publish to PyPi
if: ${{ github.event.repository.fork != true }}
- name: Upload PyPi Artifacts
uses: actions/upload-artifact@v3
with:
name: pypi-artifacts
path: |
release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
retention-days: 7
if-no-files-found: error
restore:
name: Restore Release Bucket From Backup
if: ${{ failure() || cancelled() }}
runs-on:
- self-hosted
- linux
- repo-release
needs:
- release
environment: release
steps:
- name: Clone The Salt Repository
uses: actions/checkout@v3
with:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
- name: Setup Rclone
uses: AnimMouse/setup-rclone@v1
with:
version: v1.61.1
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Restore Release Bucket
run: |
tools pkg repo restore-previous-releases
publish-pypi:
name: Publish to PyPi(test)
if: ${{ github.event.repository.fork != true }}
needs:
- prepare-workflow
- release
- restore
environment: release
runs-on:
- self-hosted
- linux
- repo-release
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
id: get-secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
TWINE_PASSWORD=$(aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/publishing/publish-pypi \
--query SecretString --output text | jq .default_passphrase -r | base64 -d \
| gpg --passphrase-file "$SECRETS_KEY_FILE" -d -)
echo "::add-mask::$TWINE_PASSWORD"
echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}"
- name: Download PyPi Artifacts
uses: actions/download-artifact@v3
with:
name: pypi-artifacts
path: artifacts/release
- name: Publish to Test PyPi
env:
TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}"
run: |
tools pkg pypi-upload release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
set-pipeline-exit-status:
# This step is just so we can make github require this step, to pass checks
@@ -256,6 +401,8 @@ jobs:
- prepare-workflow
- publish-repositories
- release
- restore
- publish-pypi
steps:
- name: Get workflow information
id: get-workflow-info

View file

@@ -12,7 +12,7 @@ on:
env:
COLUMNS: 190
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
permissions:
@@ -299,28 +299,30 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
# TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning
# issues on pre-3006 development versions on deb-based distros.
- name: Update Debian changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}"
tools changelog update-deb --draft
tools changelog update-deb
- name: Update RPM changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-rpm --draft
tools changelog update-rpm
- name: Update Release Notes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-release-notes --draft
tools changelog update-release-notes
- name: Generate MAN Pages
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}"
SALT_ON_SALTSTACK: "1"
@@ -329,22 +331,26 @@ jobs:
- name: Update Changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-changelog-md --draft
tools changelog update-changelog-md
- name: Show Changes Diff
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git diff --color
- name: Configure Git
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
- name: Setup Pre-Commit
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: ./.github/actions/setup-pre-commit
with:
version: "3.0.4"
@@ -352,6 +358,7 @@ jobs:
- name: Commit Changes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
SKIP: lint-salt,lint-tests
run: |
@@ -361,11 +368,13 @@ jobs:
- name: Create release changes patch
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
- name: Upload Changes Diff Artifact
uses: actions/upload-artifact@v3
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
@@ -436,7 +445,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"
@@ -454,7 +463,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"

View file

@@ -25,9 +25,8 @@ on:
env:
COLUMNS: 190
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
REPO_BASE_URL: "https://${{ secrets.SALT_REPO_USER }}:${{ secrets.SALT_REPO_PASS }}@${{ secrets.SALT_REPO_DOMAIN }}"
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
@@ -173,6 +172,8 @@ jobs:
validate-version: true
- name: Check Existing Releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
@@ -301,28 +302,30 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
release: true
# TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning
# issues on pre-3006 development versions on deb-based distros.
- name: Update Debian changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}"
tools changelog update-deb --draft
tools changelog update-deb
- name: Update RPM changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-rpm --draft
tools changelog update-rpm
- name: Update Release Notes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-release-notes --draft --release
tools changelog update-release-notes --release
- name: Generate MAN Pages
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}"
SALT_ON_SALTSTACK: "1"
@@ -331,22 +334,26 @@ jobs:
- name: Update Changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-changelog-md --draft
tools changelog update-changelog-md
- name: Show Changes Diff
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git diff --color
- name: Configure Git
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
- name: Setup Pre-Commit
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: ./.github/actions/setup-pre-commit
with:
version: "3.0.4"
@@ -354,6 +361,7 @@ jobs:
- name: Commit Changes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
SKIP: lint-salt,lint-tests
run: |
@@ -363,11 +371,13 @@ jobs:
- name: Create release changes patch
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
- name: Upload Changes Diff Artifact
uses: actions/upload-artifact@v3
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
@@ -438,7 +448,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"
@@ -456,7 +466,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.10.0"
relenv-version: "0.10.1"
python-version-linux: "3.10.10"
python-version-macos: "3.10.10"
python-version-windows: "3.10.10"
@@ -1303,7 +1313,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create src --key-id=64CBBC8173D76B3F \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Standalone Repository As An Artifact
@@ -1424,7 +1434,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1551,9 +1561,14 @@ jobs:
mkdir -p artifacts/pkgs/repo
- name: Create Repository
env:
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1648,7 +1663,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create windows --key-id=64CBBC8173D76B3F \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
@@ -1724,7 +1739,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create macos --key-id=64CBBC8173D76B3F \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
@@ -1836,7 +1851,7 @@ jobs:
- name: Create Repository
run: |
tools pkg repo create onedir --key-id=64CBBC8173D76B3F \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
@@ -1892,6 +1907,9 @@ jobs:
tree -a artifacts/pkgs/repo/
- name: Upload Repository Contents (staging)
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo publish staging --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/
@ -1946,6 +1964,61 @@ jobs:
run: |
tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
- name: Upload PyPi Artifacts
uses: actions/upload-artifact@v3
with:
name: pypi-artifacts
path: |
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
retention-days: 7
if-no-files-found: error
test-linux-pkg-downloads:
name: Test Linux Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: ubuntu-latest
platform: linux
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
secrets: inherit
test-macos-pkg-downloads:
name: Test macOS Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-macos.yml
with:
distro-slug: macos-12
platform: darwin
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
secrets: inherit
test-windows-pkg-downloads:
name: Test Windows Package Downloads
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-2022
platform: windows
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
secrets: inherit
publish-pypi:
name: Publish to PyPi(test)
if: ${{ github.event.repository.fork != true }}
@ -1994,6 +2067,9 @@ jobs:
- windows-2019-msi-pkg-tests
- windows-2022-nsis-pkg-tests
- windows-2022-msi-pkg-tests
- test-linux-pkg-downloads
- test-macos-pkg-downloads
- test-windows-pkg-downloads
environment: staging
runs-on:
- self-hosted
@ -2029,10 +2105,10 @@ jobs:
echo "::add-mask::$TWINE_PASSWORD"
echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}"
- name: Download Source Repository
- name: Download PyPi Artifacts
uses: actions/download-artifact@v3
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo
name: pypi-artifacts
path: artifacts/release
- name: Publish to Test PyPi

View file

@ -77,7 +77,7 @@
- name: Create Repository
run: |
tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -53,7 +53,7 @@
- name: Create Repository
run: |
tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact

View file

@ -89,7 +89,7 @@
- name: Create Repository
run: |
tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact

View file

@ -81,9 +81,16 @@
mkdir -p artifacts/pkgs/repo
- name: Create Repository
env:
<%- if gh_environment == 'staging' %>
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
<%- endif %>
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

View file

@ -53,7 +53,7 @@
- name: Create Repository
run: |
tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Standalone Repository As An Artifact

View file

@ -71,7 +71,7 @@
- name: Create Repository
run: |
tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \
--salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact

View file

@ -98,28 +98,30 @@ on:
release: true
<%- endif %>
# TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning
# issues on pre-3006 development versions on deb-based distros.
- name: Update Debian changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft
tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}"
tools changelog update-deb --draft
tools changelog update-deb
- name: Update RPM changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-rpm --draft
tools changelog update-rpm
- name: Update Release Notes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-release-notes --draft <%- if prepare_actual_release %> --release <%- endif %>
tools changelog update-release-notes <%- if prepare_actual_release %> --release <%- endif %>
- name: Generate MAN Pages
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}"
SALT_ON_SALTSTACK: "1"
@ -128,22 +130,26 @@ on:
- name: Update Changelog
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools changelog update-changelog-md --draft
tools changelog update-changelog-md
- name: Show Changes Diff
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git diff --color
- name: Configure Git
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
- name: Setup Pre-Commit
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: ./.github/actions/setup-pre-commit
with:
version: "<{ pre_commit_version }>"
@ -151,6 +157,7 @@ on:
- name: Commit Changes
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
env:
SKIP: lint-salt,lint-tests
run: |
@ -160,11 +167,13 @@ on:
- name: Create release changes patch
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
- name: Upload Changes Diff Artifact
uses: actions/upload-artifact@v3
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch

View file

@ -9,14 +9,14 @@
<%- set python_version_linux = "3.10.10" %>
<%- set python_version_macos = "3.10.10" %>
<%- set python_version_windows = "3.10.10" %>
<%- set relenv_version = "0.10.0" %>
<%- set relenv_version = "0.10.1" %>
<%- set gpg_key_id = "64CBBC8173D76B3F" %>
<%- set prepare_actual_release = prepare_actual_release | default(False) %>
<%- set release_branches = ["master", "3006.x"] %>
---
<%- block name %>
name: <{ workflow_name }>
run-name: "<{ workflow_name }> (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('branch: {0}', github.ref_name) }})"
run-name: "<{ workflow_name }> (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('{0}: {1}', startsWith(github.event.ref, 'refs/tags') && 'tag' || 'branch', github.ref_name) }})"
<%- endblock name %>
<%- block on %>
@ -31,7 +31,7 @@ on:
env:
COLUMNS: 190
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
<%- endblock env %>
@ -188,6 +188,8 @@ jobs:
<%- if prepare_actual_release %>
- name: Check Existing Releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}

View file

@ -97,6 +97,9 @@ concurrency:
tree -a artifacts/pkgs/repo/
- name: Upload Repository Contents (<{ gh_environment }>)
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo publish <{ gh_environment }> --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/

View file

@ -25,11 +25,6 @@ on:
<%- endblock on %>
<%- block env %>
<{- super() }>
REPO_BASE_URL: "https://${{ secrets.SALT_REPO_DOMAIN }}"
<%- endblock env %>
<%- block concurrency %>
concurrency:
@ -116,6 +111,8 @@ permissions:
validate-version: true
- name: Check Existing Releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
@ -144,12 +141,17 @@ permissions:
- name: Clone The Salt Repository
uses: actions/checkout@v3
- name: Setup Rclone
uses: AnimMouse/setup-rclone@v1
with:
version: v1.61.1
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Backup Previous Releases
run: |
tools pkg repo backup-previous-releases --salt-version=${{ needs.prepare-workflow.outputs.salt-version }}
tools pkg repo backup-previous-releases
publish-repositories:
<%- do conclusion_needs.append('publish-repositories') %>
@ -177,6 +179,9 @@ permissions:
uses: ./.github/actions/setup-python-tools-scripts
- name: Publish Release Repository
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
@ -284,11 +289,101 @@ permissions:
replacesArtifacts: true
tag: v${{ needs.prepare-workflow.outputs.salt-version }}
- name: Publish to PyPi
if: ${{ github.event.repository.fork != true }}
- name: Upload PyPi Artifacts
uses: actions/upload-artifact@v3
with:
name: pypi-artifacts
path: |
release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
retention-days: 7
if-no-files-found: error
restore:
<%- do conclusion_needs.append('restore') %>
name: Restore Release Bucket From Backup
if: ${{ failure() || cancelled() }}
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
needs:
- release
<%- for need in test_repo_needs.iter(consume=True) %>
- <{ need }>
<%- endfor %>
environment: <{ gh_environment }>
steps:
- name: Clone The Salt Repository
uses: actions/checkout@v3
with:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
- name: Setup Rclone
uses: AnimMouse/setup-rclone@v1
with:
version: v1.61.1
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Restore Release Bucket
run: |
tools pkg repo restore-previous-releases
publish-pypi:
<%- do conclusion_needs.append('publish-pypi') %>
name: Publish to PyPi(test)
if: ${{ github.event.repository.fork != true }}
needs:
- prepare-workflow
- release
- restore
environment: <{ gh_environment }>
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
id: get-secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
TWINE_PASSWORD=$(aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/publishing/publish-pypi \
--query SecretString --output text | jq .default_passphrase -r | base64 -d \
| gpg --passphrase-file "$SECRETS_KEY_FILE" -d -)
echo "::add-mask::$TWINE_PASSWORD"
echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}"
- name: Download PyPi Artifacts
uses: actions/download-artifact@v3
with:
name: pypi-artifacts
path: artifacts/release
- name: Publish to Test PyPi
env:
TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}"
run: |
tools pkg pypi-upload release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
<%- endblock jobs %>

View file

@ -37,11 +37,6 @@ on:
<%- endblock on %>
<%- block env %>
<{- super() }>
REPO_BASE_URL: "https://${{ secrets.SALT_REPO_USER }}:${{ secrets.SALT_REPO_PASS }}@${{ secrets.SALT_REPO_DOMAIN }}"
<%- endblock env %>
<%- block concurrency %>
concurrency:
@ -146,6 +141,16 @@ concurrency:
run: |
tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
- name: Upload PyPi Artifacts
uses: actions/upload-artifact@v3
with:
name: pypi-artifacts
path: |
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
retention-days: 7
if-no-files-found: error
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
@ -201,10 +206,10 @@ concurrency:
echo "::add-mask::$TWINE_PASSWORD"
echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}"
- name: Download Source Repository
- name: Download PyPi Artifacts
uses: actions/download-artifact@v3
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo
name: pypi-artifacts
path: artifacts/release
- name: Publish to Test PyPi

View file

@ -14,6 +14,9 @@
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
<%- if gh_environment == "release" %>
artifacts-from-workflow: staging.yml
<%- endif %>
secrets: inherit
@ -33,6 +36,9 @@
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
<%- if gh_environment == "release" %>
artifacts-from-workflow: staging.yml
<%- endif %>
secrets: inherit
@ -46,10 +52,13 @@
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-windows.yml
with:
distro-slug: windows-latest
distro-slug: windows-2022
platform: windows
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: <{ gh_environment }>
skip-code-coverage: true
<%- if gh_environment == "release" %>
artifacts-from-workflow: staging.yml
<%- endif %>
secrets: inherit

View file

@ -43,6 +43,13 @@ on:
type: string
description: The nox session to run
default: test-pkgs-onedir
artifacts-from-workflow:
required: false
type: string
description: >
Which workflow to download artifacts from. An empty string means the
current workflow run.
default: ""
env:
@ -98,12 +105,23 @@ jobs:
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
if: inputs.artifacts-from-workflow == ''
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Onedir Tarball as an Artifact(from a different workflow)
if: inputs.artifacts-from-workflow != ''
uses: dawidd6/action-download-artifact@v2
with:
workflow: ${{ inputs.artifacts-from-workflow }}
workflow_conclusion: ""
branch: ${{ github.event.ref }}
if_no_artifact_found: fail
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
@ -135,6 +153,14 @@ jobs:
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
@ -215,7 +241,8 @@ jobs:
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN: ${{ vars.SALT_REPO_DOMAIN || ( inputs.environment == 'staging' && 'staging.repo.saltproject.io' || 'repo.saltproject.io') }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs

View file

@ -43,6 +43,13 @@ on:
type: string
description: The nox session to run
default: test-pkgs-onedir
artifacts-from-workflow:
required: false
type: string
description: >
Which workflow to download artifacts from. An empty string means the
current workflow run.
default: ""
env:
@ -95,12 +102,23 @@ jobs:
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
if: inputs.artifacts-from-workflow == ''
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Onedir Tarball as an Artifact(from a different workflow)
if: inputs.artifacts-from-workflow != ''
uses: dawidd6/action-download-artifact@v2
with:
workflow: ${{ inputs.artifacts-from-workflow }}
workflow_conclusion: ""
branch: ${{ github.event.ref }}
if_no_artifact_found: fail
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
@ -148,6 +166,14 @@ jobs:
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
@ -235,7 +261,8 @@ jobs:
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN: ${{ vars.SALT_REPO_DOMAIN || ( inputs.environment == 'staging' && 'staging.repo.saltproject.io' || 'repo.saltproject.io') }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs

View file

@ -33,16 +33,28 @@ on:
type: string
description: The onedir package name to use
default: salt
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
nox-session:
required: false
type: string
description: The nox session to run
default: test-pkgs-onedir
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
skip-junit-reports:
required: false
type: boolean
description: Skip Publishing JUnit Reports
default: false
artifacts-from-workflow:
required: false
type: string
description: >
Which workflow to download artifacts from. An empty string means the
current workflow run.
default: ""
env:
@ -57,7 +69,10 @@ jobs:
generate-matrix:
name: Generate Package Test Matrix
runs-on: ubuntu-latest
runs-on:
- self-hosted
- linux
- x86_64
outputs:
arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
@ -77,7 +92,10 @@ jobs:
name: Setup Test Dependencies
needs:
- generate-matrix
runs-on: ${{ inputs.distro-slug }}
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 90
strategy:
fail-fast: false
@ -95,49 +113,90 @@ jobs:
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Download Onedir Tarball as an Artifact
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
if: inputs.artifacts-from-workflow == ''
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Onedir Tarball as an Artifact(from a different workflow)
if: inputs.artifacts-from-workflow != ''
uses: dawidd6/action-download-artifact@v2
with:
workflow: ${{ inputs.artifacts-from-workflow }}
workflow_conclusion: ""
branch: ${{ github.event.ref }}
if_no_artifact_found: fail
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
shell: bash
run: |
py -3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
update-environment: true
uses: ./.github/actions/setup-python-tools-scripts
- name: Install Nox
- name: Get Salt Project GitHub Actions Bot Environment
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
pip install 'nox==${{ env.NOX_VERSION }}'
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
run: |
nox --force-color --install-only -e ${{ inputs.nox-session }}
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
- name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox --force-color -e "pre-archive-cleanup(pkg=False)"
tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy ${{ inputs.distro-slug }}
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
@ -147,12 +206,15 @@ jobs:
test:
name: Test
runs-on: ${{ inputs.distro-slug }}
runs-on:
- self-hosted
- linux
- bastion
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- dependencies
- generate-matrix
- dependencies
strategy:
fail-fast: false
matrix:
@ -166,84 +228,107 @@ jobs:
uses: actions/download-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
py -3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: "${{ inputs.python-version }}"
update-environment: true
- name: Install Nox
run: |
pip install 'nox==${{ env.NOX_VERSION }}'
- name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }}
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Show System Info & Test Plan
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "1"
PRINT_TEST_PLAN_ONLY: "1"
PRINT_SYSTEM_INFO: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
run: |
nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
- name: Run Package Download Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
INSTALL_TYPE: ${{ matrix.install_type }}
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.install_arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN: ${{ vars.SALT_REPO_DOMAIN || ( inputs.environment == 'staging' && 'staging.repo.saltproject.io' || 'repo.saltproject.io') }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
run: |
nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs
- name: Run Package Download Tests
env:
INSTALL_TYPE: ${{ matrix.install_type }}
SALT_RELEASE: "${{ inputs.salt-version }}"
SALT_REPO_ARCH: ${{ matrix.install_arch }}
SALT_REPO_TYPE: ${{ inputs.environment }}
SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
- name: Combine Coverage Reports
if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
run: |
nox --force-color -e combine-coverage
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Prepare Test Run Artifacts
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && job.status != 'cancelled'
shell: powershell
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm artifacts/salt* -r -Force
tree artifacts /F /A
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy ${{ inputs.distro-slug }} || true
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Upload Test Run Artifacts
if: always() && job.status != 'cancelled'
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v3
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
@ -255,6 +340,7 @@ jobs:
report:
name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }})
runs-on: ubuntu-latest
environment: ${{ inputs.environment }}
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- test

View file

@ -151,6 +151,12 @@ jobs:
run: |
nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
name: Test
runs-on: ${{ inputs.distro-slug }}

View file

@ -166,6 +166,12 @@ jobs:
run: |
tools --timestamps vm destroy ${{ inputs.distro-slug }}
- name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v3
with:
name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: nox.${{ inputs.distro-slug }}.tar.*
test:
name: Test
runs-on:

1
changelog/63650.fixed.md Normal file
View file

@ -0,0 +1 @@
Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``)

1
changelog/63935.fixed.md Normal file
View file

@ -0,0 +1 @@
Windows pkg module now properly handles versions containing strings

1
changelog/63948.fixed.md Normal file
View file

@ -0,0 +1 @@
Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function.

1
changelog/63981.fixed.md Normal file
View file

@ -0,0 +1 @@
Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows

View file

@ -1205,7 +1205,42 @@ def decompress_dependencies(session):
)
session_run_always(session, "tar", "xpf", nox_dependencies_tarball)
nox_dependencies_tarball_path.unlink()
if os.environ.get("DELETE_NOX_ARCHIVE", "0") == "1":
nox_dependencies_tarball_path.unlink()
session.log("Finding broken 'python' symlinks under '.nox/' ...")
for dirname in os.scandir(REPO_ROOT / ".nox"):
if not IS_WINDOWS:
scan_path = REPO_ROOT.joinpath(".nox", dirname, "bin")
else:
scan_path = REPO_ROOT.joinpath(".nox", dirname, "Scripts")
script_paths = {str(p): p for p in os.scandir(scan_path)}
for key in sorted(script_paths):
path = script_paths[key]
if not path.is_symlink():
continue
broken_link = pathlib.Path(path)
resolved_link = os.readlink(path)
if not os.path.isabs(resolved_link):
# Relative symlinks, resolve them
resolved_link = os.path.join(scan_path, resolved_link)
if not os.path.exists(resolved_link):
session.log("The symlink %r looks to be broken", resolved_link)
# This is a broken link, fix it
resolved_link_suffix = resolved_link.split(
f"artifacts{os.sep}salt{os.sep}"
)[-1]
fixed_link = REPO_ROOT.joinpath(
"artifacts", "salt", resolved_link_suffix
)
session.log(
"Fixing broken symlink in nox virtualenv %r, from %r to %r",
dirname.name,
resolved_link,
str(fixed_link.relative_to(REPO_ROOT)),
)
broken_link.unlink()
broken_link.symlink_to(fixed_link)
@nox.session(python=False, name="compress-dependencies")

View file

@ -4,12 +4,12 @@ Test Salt Pkg Downloads
import logging
import os
import pathlib
import subprocess
import re
import shutil
import attr
import packaging
import pytest
import requests
from pytestskipmarkers.utils import platform
from saltfactories.utils import random_string
@ -165,51 +165,74 @@ def get_salt_test_commands():
@pytest.fixture(scope="module")
def pkg_container(salt_factories, download_test_image, root_url, salt_release):
def pkg_container(
salt_factories,
download_test_image,
root_url,
salt_release,
tmp_path_factory,
gpg_key_name,
):
downloads_path = tmp_path_factory.mktemp("downloads")
container = salt_factories.get_container(
random_string(f"{download_test_image.container_id}_"),
download_test_image.name,
pull_before_start=True,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
container_run_kwargs=dict(
volumes={
str(downloads_path): {"bind": "/downloads", "mode": "z"},
}
),
)
try:
container_setup_func = globals()[f"setup_{download_test_image.os_type}"]
except KeyError:
raise pytest.skip.Exception(
f"Unable to handle {pkg_container.os_type}. Skipping.",
_use_item_location=True,
)
container.before_terminate(shutil.rmtree, str(downloads_path), ignore_errors=True)
with container.started():
setup_func = globals()[f"setup_{download_test_image.os_type}"]
download_test_image.container = container
try:
cmds = setup_func(
container_setup_func(
container,
download_test_image.os_version,
download_test_image.os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
)
except KeyError:
pytest.skip(f"Unable to handle {pkg_container.os_type}. Skipping.")
for cmd in cmds:
res = container.run(cmd)
assert res.returncode == 0
download_test_image.container = container
yield download_test_image
yield download_test_image
except Exception as exc:
pytest.fail(f"Failed to setup {pkg_container.os_type}: {exc}")
@pytest.fixture(scope="module")
def root_url(salt_release):
repo_type = os.environ.get("SALT_REPO_TYPE", "staging")
repo_domain = os.environ.get("SALT_REPO_DOMAIN", "repo.saltproject.io")
if os.environ.get("SALT_REPO_TYPE", "release") == "staging":
repo_domain = os.environ.get(
"SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io"
)
else:
repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io")
if "rc" in salt_release:
salt_path = "salt_rc/salt"
else:
salt_path = "salt"
salt_repo_user = os.environ.get("SALT_REPO_USER")
if salt_repo_user:
log.warning(
log.info(
"SALT_REPO_USER: %s",
salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1],
)
salt_repo_pass = os.environ.get("SALT_REPO_PASS")
if salt_repo_pass:
log.warning(
log.info(
"SALT_REPO_PASS: %s",
salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1],
)
@ -221,11 +244,22 @@ def root_url(salt_release):
def get_salt_release():
if platform.is_darwin() or platform.is_windows():
_DEFAULT_RELEASE = "3005-1"
else:
_DEFAULT_RELEASE = "3005.1"
return os.environ.get("SALT_RELEASE", _DEFAULT_RELEASE)
salt_release = os.environ.get("SALT_RELEASE")
if salt_release is None:
log.warning(
"Setting salt release to 3006.0rc2 which is probably not what you want."
)
salt_release = "3006.0rc2"
if packaging.version.parse(salt_release) < packaging.version.parse("3006.0rc1"):
log.warning(f"The salt release being tested, {salt_release!r} looks off.")
return salt_release
@pytest.fixture(scope="module")
def gpg_key_name(salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
return "SALT-PROJECT-GPG-PUBKEY-2023.pub"
return "salt-archive-keyring.gpg"
@pytest.fixture(scope="module")
@ -233,146 +267,222 @@ def salt_release():
yield get_salt_release()
def setup_amazon(os_version, os_codename, root_url, salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
gpg_file = "SALT-PROJECT-GPG-PUBKEY-2023.pub"
else:
gpg_file = "salt-archive-keyring.gpg"
def setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
os_name,
gpg_key_name,
):
arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
if arch == "aarch64":
arch = "arm64"
cmds = [
"pwd",
f"rpm --import {root_url}/amazon/2/{arch}/minor/{salt_release}/{gpg_file}",
f"curl -fsSL -o /etc/yum.repos.d/salt-amzn.repo {root_url}/amazon/2/{arch}/minor/{salt_release}.repo",
[
"sh",
"-c",
f"echo baseurl={root_url}/amazon/2/{arch}/minor/{salt_release} >> /etc/yum.repos.d/salt-amzn.repo",
],
[
"sh",
"-c",
f"echo gpgkey={root_url}/amazon/2/x86_64/minor/{salt_release}/{gpg_file} >> /etc/yum.repos.d/salt-amzn.repo",
],
"yum clean expire-cache",
"yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api",
repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}"
gpg_file_url = f"{repo_url_base}/{gpg_key_name}"
try:
pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
except Exception as exc:
pytest.fail(f"Failed to download {gpg_file_url}: {exc}")
ret = container.run("rpm", "--import", f"/downloads/{gpg_key_name}")
if ret.returncode != 0:
pytest.fail("Failed to import gpg key")
repo_file = pytest.helpers.download_file(
f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo"
)
commands = [
("mv", f"/downloads/{repo_file.name}", f"/etc/yum.repos.d/salt-{os_name}.repo"),
("yum", "clean", "expire-cache"),
(
"yum",
"install",
"-y",
"salt-master",
"salt-minion",
"salt-ssh",
"salt-syndic",
"salt-cloud",
"salt-api",
),
]
return cmds
for cmd in commands:
ret = container.run(*cmd)
if ret.returncode != 0:
pytest.fail(f"Failed to run: {' '.join(cmd)!r}")
def setup_redhat(os_version, os_codename, root_url, salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
gpg_file = "SALT-PROJECT-GPG-PUBKEY-2023.pub"
else:
gpg_file = "SALTSTACK-GPG-KEY2.pub"
arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
if arch == "aarch64":
arch = "arm64"
cmds = [
f"rpm --import {root_url}/redhat/{os_version}/{arch}/minor/{salt_release}/{gpg_file}",
f"curl -fsSL -o /etc/yum.repos.d/salt.repo {root_url}/redhat/{os_version}/{arch}/minor/{salt_release}.repo",
[
"sh",
"-c",
f"echo baseurl={root_url}/redhat/{os_version}/{arch}/minor/{salt_release} >> /etc/yum.repos.d/salt.repo",
],
"yum clean expire-cache",
"yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api",
]
return cmds
def setup_amazon(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"amazon",
gpg_key_name,
)
def setup_fedora(os_version, os_codename, root_url, salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
gpg_file = "SALT-PROJECT-GPG-PUBKEY-2023.pub"
else:
gpg_file = "SALTSTACK-GPG-KEY2.pub"
arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
if arch == "aarch64":
arch = "arm64"
cmds = [
f"rpm --import {root_url}/fedora/{os_version}/{arch}/minor/{salt_release}/{gpg_file}"
f"curl -fsSL -o /etc/yum.repos.d/salt.repo {root_url}/fedora/{os_version}/{arch}/minor/{salt_release}.repo",
[
"sh",
"-c",
f"echo baseurl={root_url}/fedora/{os_version}/{arch}/minor/{salt_release} >> /etc/yum.repos.d/salt.repo",
],
[
"sh",
"-c",
f"echo gpgkey={root_url}/fedora/{os_version}/{arch}/minor/{salt_release}/{gpg_file} >> /etc/yum.repos.d/salt.repo",
],
"yum clean expire-cache",
"yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api",
]
return cmds
def setup_redhat(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"redhat",
gpg_key_name,
)
def setup_debian(os_version, os_codename, root_url, salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
gpg_file = "SALT-PROJECT-GPG-PUBKEY-2023.gpg"
else:
gpg_file = "salt-archive-keyring.gpg"
def setup_fedora(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_redhat_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"fedora",
gpg_key_name,
)
def setup_debian_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
os_name,
gpg_key_name,
):
arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
if arch == "aarch64":
arch = "arm64"
elif arch == "x86_64":
arch = "amd64"
cmds = [
"apt-get update -y",
"apt-get install -y curl",
f"curl -fsSL -o /usr/share/keyrings/{gpg_file} {root_url}/debian/{os_version}/{arch}/minor/{salt_release}/{gpg_file}",
[
"sh",
"-c",
f'echo "deb [signed-by=/usr/share/keyrings/{gpg_file} arch={arch}] {root_url}/debian/{os_version}/{arch}/minor/{salt_release} {os_codename} main" > /etc/apt/sources.list.d/salt.list',
],
"apt-get update",
"apt-get install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api",
ret = container.run("apt-get", "update", "-y")
if ret.returncode != 0:
pytest.fail("Failed to run: 'apt-get update -y'")
repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}"
gpg_file_url = f"{repo_url_base}/{gpg_key_name}"
try:
pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
except Exception as exc:
pytest.fail(f"Failed to download {gpg_file_url}: {exc}")
salt_sources_path = downloads_path / "salt.list"
salt_sources_path.write_text(
f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n"
)
commands = [
("mv", f"/downloads/{gpg_key_name}", f"/usr/share/keyrings/{gpg_key_name}"),
(
"mv",
f"/downloads/{salt_sources_path.name}",
"/etc/apt/sources.list.d/salt.list",
),
("apt-get", "install", "-y", "ca-certificates"),
("update-ca-certificates",),
("apt-get", "update"),
(
"apt-get",
"install",
"-y",
"salt-master",
"salt-minion",
"salt-ssh",
"salt-syndic",
"salt-cloud",
"salt-api",
),
]
return cmds
for cmd in commands:
ret = container.run(*cmd)
if ret.returncode != 0:
pytest.fail(f"Failed to run: {' '.join(cmd)!r}\n{ret}")
def setup_ubuntu(os_version, os_codename, root_url, salt_release):
if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
gpg_file = "SALT-PROJECT-GPG-PUBKEY-2023.gpg"
else:
gpg_file = "salt-archive-keyring.gpg"
def setup_debian(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_debian_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"debian",
gpg_key_name,
)
arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
if arch == "aarch64":
arch = "arm64"
elif arch == "x86_64":
arch = "amd64"
cmds = [
"apt-get update -y",
"apt-get install -y curl",
f"curl -fsSL -o /usr/share/keyrings/{gpg_file} {root_url}/ubuntu/{os_version}/{arch}/minor/{salt_release}/{gpg_file}",
[
"sh",
"-c",
f'echo "deb [signed-by=/usr/share/keyrings/{gpg_file} arch={arch}] {root_url}/ubuntu/{os_version}/{arch}/minor/{salt_release} {os_codename} main" > /etc/apt/sources.list.d/salt.list',
],
"apt-get update",
"apt-get install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api",
]
return cmds
def setup_ubuntu(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
gpg_key_name,
):
setup_debian_family(
container,
os_version,
os_codename,
root_url,
salt_release,
downloads_path,
"ubuntu",
gpg_key_name,
)
@pytest.fixture(scope="module")
def setup_macos(root_url, salt_release):
def setup_macos(root_url, salt_release, shell):
arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
if arch == "aarch64":
@ -384,33 +494,32 @@ def setup_macos(root_url, salt_release):
mac_pkg = f"salt-{salt_release}-py3-{arch}-unsigned.pkg"
else:
mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg"
# TODO: We still don't sign mac packages. Remove the line below when we do
mac_pkg = f"salt-{salt_release}-py3-{arch}-unsigned.pkg"
mac_pkg_url = f"{root_url}/macos/minor/{salt_release}/{mac_pkg}"
else:
mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}"
mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg"
mac_pkg_path = f"/tmp/{mac_pkg}"
pytest.helpers.download_file(mac_pkg_url, f"/tmp/{mac_pkg}")
# We should be able to issue a --help without being root
ret = subprocess.run(
["curl", "-fsSL", "-o", f"/tmp/{mac_pkg}", f"{mac_pkg_url}"],
ret = shell.run(
"installer",
"-pkg",
mac_pkg_path,
"-target",
"/",
check=False,
capture_output=True,
)
assert ret.returncode == 0
ret = subprocess.run(
["installer", "-pkg", mac_pkg_path, "-target", "/"],
check=False,
capture_output=True,
)
assert ret.returncode == 0
assert ret.returncode == 0, ret
yield
@pytest.fixture(scope="module")
def setup_windows(root_url, salt_release):
def setup_windows(root_url, salt_release, shell):
root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt")
@ -435,23 +544,18 @@ def setup_windows(root_url, salt_release):
pkg_path = pathlib.Path(r"C:\TEMP", win_pkg)
pkg_path.parent.mkdir(exist_ok=True)
ret = requests.get(win_pkg_url)
with open(pkg_path, "wb") as fp:
fp.write(ret.content)
ret = subprocess.run(
[pkg_path, "/start-minion=0", "/S"],
check=False,
capture_output=True,
)
assert ret.returncode == 0
pytest.helpers.download_file(win_pkg_url, pkg_path)
if install_type.lower() == "nsis":
ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False)
else:
ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""')
assert ret.returncode == 0, ret
log.debug("Removing installed salt-minion service")
ret = subprocess.run(
["cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm"],
check=False,
capture_output=True,
ret = shell.run(
"cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False
)
assert ret.returncode == 0
assert ret.returncode == 0, ret
@pytest.mark.skip_unless_on_linux
@ -466,23 +570,21 @@ def test_download_linux(salt_test_command, pkg_container, root_url, salt_release
@pytest.mark.skip_unless_on_darwin
@pytest.mark.usefixtures("setup_macos")
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands())
def test_download_macos(salt_test_command, setup_macos):
def test_download_macos(salt_test_command, shell):
"""
Test downloading of Salt packages and running various commands on Mac OS hosts
"""
_cmd = salt_test_command.split()
ret = subprocess.run(
_cmd,
capture_output=True,
check=False,
)
assert ret.returncode == 0
ret = shell.run(*_cmd, check=False)
assert ret.returncode == 0, ret
@pytest.mark.skip_unless_on_windows
@pytest.mark.usefixtures("setup_windows")
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands())
def test_download_windows(salt_test_command, setup_windows):
def test_download_windows(salt_test_command, shell):
"""
Test downloading of Salt packages and running various commands on Windows hosts
"""
@ -490,9 +592,5 @@ def test_download_windows(salt_test_command, setup_windows):
root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt")
_cmd[0] = str(root_dir / _cmd[0])
ret = subprocess.run(
_cmd,
capture_output=True,
check=False,
)
assert ret.returncode == 0
ret = shell.run(*_cmd, check=False)
assert ret.returncode == 0, ret

View file

@ -1694,11 +1694,13 @@ def remove_stale_master_key(master):
)
def download_file(url, dest):
@pytest.helpers.register
def download_file(url, dest, auth=None):
# NOTE the stream=True parameter below
with requests.get(url, stream=True) as r:
with requests.get(url, stream=True, auth=auth) as r:
r.raise_for_status()
with open(dest, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
return dest

View file

@ -65,7 +65,7 @@ from salt.exceptions import (
SaltInvocationError,
SaltRenderError,
)
from salt.utils.versions import Version
from salt.utils.versions import LooseVersion
log = logging.getLogger(__name__)
@ -1298,7 +1298,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
successful_verbose[short_path_name] = []
def _get_source_sum(source_hash, file_path, saltenv):
def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
"""
Extract the hash sum, whether it is in a remote hash file, or just a string.
"""
@ -1314,7 +1314,9 @@ def _get_source_sum(source_hash, file_path, saltenv):
if source_hash_scheme in schemes:
# The source_hash is a file on a server
try:
cached_hash_file = __salt__["cp.cache_file"](source_hash, saltenv)
cached_hash_file = __salt__["cp.cache_file"](
source_hash, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
)
except MinionError as exc:
log.exception("Failed to cache %s", source_hash, exc_info=exc)
raise
@ -1361,6 +1363,28 @@ def _get_msiexec(use_msiexec):
return True, "msiexec"
def normalize_name(name):
"""
Nothing to do on Windows. We need this function so that Salt doesn't go
through every module looking for ``pkg.normalize_name``.
.. versionadded:: 3006.0
Args:
name (str): The name of the package
Returns:
str: The name of the package
CLI Example:
.. code-block:: bash
salt '*' pkg.normalize_name git
"""
return name
def install(name=None, refresh=False, pkgs=None, **kwargs):
r"""
Install the passed package(s) on the system using winrepo
@ -1645,7 +1669,11 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
cached_file = __salt__["cp.is_cached"](cache_file, saltenv)
if not cached_file:
try:
cached_file = __salt__["cp.cache_file"](cache_file, saltenv)
cached_file = __salt__["cp.cache_file"](
cache_file,
saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(cache_file)
log.exception(msg, exc_info=exc)
@ -1656,7 +1684,11 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
"cp.hash_file"
](cached_file):
try:
cached_file = __salt__["cp.cache_file"](cache_file, saltenv)
cached_file = __salt__["cp.cache_file"](
cache_file,
saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(cache_file)
log.exception(msg, exc_info=exc)
@ -1673,7 +1705,9 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
if not cached_pkg:
# It's not cached. Cache it, mate.
try:
cached_pkg = __salt__["cp.cache_file"](installer, saltenv)
cached_pkg = __salt__["cp.cache_file"](
installer, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
)
except MinionError as exc:
msg = "Failed to cache {}".format(installer)
log.exception(msg, exc_info=exc)
@ -1694,7 +1728,11 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
"cp.hash_file"
](cached_pkg):
try:
cached_pkg = __salt__["cp.cache_file"](installer, saltenv)
cached_pkg = __salt__["cp.cache_file"](
installer,
saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(installer)
log.exception(msg, exc_info=exc)
@ -1716,7 +1754,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# Compare the hash sums
source_hash = pkginfo[version_num].get("source_hash", False)
if source_hash:
source_sum = _get_source_sum(source_hash, cached_pkg, saltenv)
source_sum = _get_source_sum(source_hash, cached_pkg, saltenv, **kwargs)
log.debug(
"pkg.install: Source %s hash: %s",
source_sum["hash_type"],
@ -1768,7 +1806,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# Install the software
# Check Use Scheduler Option
log.debug("PKG : cmd: %s /s /c %s", cmd_shell, arguments)
log.debug("PKG : cmd: %s /c %s", cmd_shell, arguments)
log.debug("PKG : pwd: %s", cache_path)
if pkginfo[version_num].get("use_scheduler", False):
# Create Scheduled Task
@ -1778,7 +1816,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
force=True,
action_type="Execute",
cmd=cmd_shell,
arguments='/s /c "{}"'.format(arguments),
arguments='/c "{}"'.format(arguments),
start_in=cache_path,
trigger_type="Once",
start_date="1975-01-01",
@ -1830,7 +1868,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
else:
# Launch the command
result = __salt__["cmd.run_all"](
'"{}" /s /c "{}"'.format(cmd_shell, arguments),
'"{}" /c "{}"'.format(cmd_shell, arguments),
cache_path,
output_loglevel="trace",
python_shell=False,
@ -2086,7 +2124,11 @@ def remove(name=None, pkgs=None, **kwargs):
if not cached_pkg:
# It's not cached. Cache it, mate.
try:
cached_pkg = __salt__["cp.cache_file"](uninstaller, saltenv)
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(uninstaller)
log.exception(msg, exc_info=exc)
@ -2106,7 +2148,11 @@ def remove(name=None, pkgs=None, **kwargs):
"cp.hash_file"
](cached_pkg):
try:
cached_pkg = __salt__["cp.cache_file"](uninstaller, saltenv)
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(uninstaller)
log.exception(msg, exc_info=exc)
@ -2126,7 +2172,7 @@ def remove(name=None, pkgs=None, **kwargs):
cached_pkg = cached_pkg.replace("/", "\\")
cache_path, _ = os.path.split(cached_pkg)
# os.path.expandvars is not required as we run everything through cmd.exe /s /c
# os.path.expandvars is not required as we run everything through cmd.exe /c
if kwargs.get("extra_uninstall_flags"):
uninstall_flags = "{} {}".format(
@ -2154,7 +2200,7 @@ def remove(name=None, pkgs=None, **kwargs):
# Uninstall the software
changed.append(pkgname)
# Check Use Scheduler Option
log.debug("PKG : cmd: %s /s /c %s", cmd_shell, arguments)
log.debug("PKG : cmd: %s /c %s", cmd_shell, arguments)
log.debug("PKG : pwd: %s", cache_path)
if pkginfo[target].get("use_scheduler", False):
# Create Scheduled Task
@ -2164,7 +2210,7 @@ def remove(name=None, pkgs=None, **kwargs):
force=True,
action_type="Execute",
cmd=cmd_shell,
arguments='/s /c "{}"'.format(arguments),
arguments='/c "{}"'.format(arguments),
start_in=cache_path,
trigger_type="Once",
start_date="1975-01-01",
@ -2181,7 +2227,7 @@ def remove(name=None, pkgs=None, **kwargs):
else:
# Launch the command
result = __salt__["cmd.run_all"](
'"{}" /s /c "{}"'.format(cmd_shell, arguments),
'"{}" /c "{}"'.format(cmd_shell, arguments),
output_loglevel="trace",
python_shell=False,
redirect_stderr=True,
@ -2359,7 +2405,7 @@ def _reverse_cmp_pkg_versions(pkg1, pkg2):
"""
Compare software package versions
"""
return 1 if Version(pkg1) > Version(pkg2) else -1
return 1 if LooseVersion(pkg1) > LooseVersion(pkg2) else -1
def _get_latest_pkg_version(pkginfo):

View file

@ -19,6 +19,7 @@ from salt.exceptions import ArgumentValueError, CommandExecutionError
try:
import pythoncom
import pywintypes
import win32com.client
HAS_DEPENDENCIES = True
@ -359,7 +360,13 @@ def list_tasks(location="\\"):
task_service.Connect()
# Get the folder to list tasks from
task_folder = task_service.GetFolder(location)
try:
task_folder = task_service.GetFolder(location)
except pywintypes.com_error:
msg = "Unable to load location: {}".format(location)
log.error(msg)
raise CommandExecutionError(msg)
tasks = task_folder.GetTasks(0)
ret = []
@ -1129,12 +1136,13 @@ def edit_task(
# TODO: Check triggers for end_boundary
if delete_after is False:
task_definition.Settings.DeleteExpiredTaskAfter = ""
if delete_after in duration:
task_definition.Settings.DeleteExpiredTaskAfter = _lookup_first(
duration, delete_after
)
else:
return 'Invalid value for "delete_after"'
if delete_after in duration:
task_definition.Settings.DeleteExpiredTaskAfter = _lookup_first(
duration, delete_after
)
else:
return 'Invalid value for "delete_after"'
if multiple_instances is not None:
task_definition.Settings.MultipleInstances = instances[multiple_instances]
@ -1567,6 +1575,16 @@ def info(name, location="\\"):
trigger["delay"] = _reverse_lookup(duration, triggerObj.Delay)
else:
trigger["delay"] = False
if hasattr(triggerObj, "Repetition"):
trigger["repeat_duration"] = _reverse_lookup(
duration, triggerObj.Repetition.Duration
)
trigger["repeat_interval"] = _reverse_lookup(
duration, triggerObj.Repetition.Interval
)
trigger[
"repeat_stop_at_duration_end"
] = triggerObj.Repetition.StopAtDurationEnd
triggers.append(trigger)
properties["settings"] = settings

View file

@ -16,6 +16,7 @@ import copy
import datetime
import fnmatch
import importlib
import inspect
import logging
import os
import random
@ -2374,11 +2375,15 @@ class State:
*cdata["args"], **cdata["kwargs"]
)
self.states.inject_globals = {}
if (
"check_cmd" in low
and "{0[state]}.mod_run_check_cmd".format(low) not in self.states
):
ret.update(self._run_check_cmd(low))
if "check_cmd" in low:
state_check_cmd = "{0[state]}.mod_run_check_cmd".format(low)
state_func = "{0[state]}.{0[fun]}".format(low)
state_func_sig = inspect.signature(self.states[state_func])
if state_check_cmd not in self.states:
ret.update(self._run_check_cmd(low))
else:
if "check_cmd" not in state_func_sig.parameters:
ret.update(self._run_check_cmd(low))
except Exception as exc: # pylint: disable=broad-except
log.debug(
"An exception occurred in this state: %s",

View file

@ -760,7 +760,7 @@ def _find_install_targets(
err = "Unable to cache {0}: {1}"
try:
cached_path = __salt__["cp.cache_file"](
version_string, saltenv=kwargs["saltenv"]
version_string, saltenv=kwargs["saltenv"], **kwargs
)
except CommandExecutionError as exc:
problems.append(err.format(version_string, exc))

View file

@ -138,3 +138,25 @@ def test_issue_1896_file_append_source(file, tmp_path, state_tree):
testfile_contents = testfile.read_text()
assert testfile_contents == FIRST_IF_CONTENTS + SECOND_IF_CONTENTS
def test_file_append_check_cmd(modules, state_tree, tmp_path):
"""
Test that check_cmd works for file.append
and those states do not run.
"""
sls_contents = f"""
append_in_file:
file.append:
- name: /tmp/test
- text: "appended text"
- check_cmd:
- "djasjahj"
"""
with pytest.helpers.temp_file(
"file-append-check-cmd.sls", sls_contents, state_tree
):
ret = modules.state.sls("file-append-check-cmd")
for state_run in ret:
assert state_run.result is False
assert state_run.comment == "check_cmd determined the state failed"

View file

@ -376,3 +376,27 @@ def test_file_replace_prerequired_issues_55775(modules, state_tree, tmp_path):
assert state_run.result is True
assert managed_file.exists()
def test_file_replace_check_cmd(modules, state_tree, tmp_path):
"""
Test that check_cmd works for file.replace
and those states do not run.
"""
sls_contents = f"""
replace_in_file:
file.replace:
- name: /tmp/test
- pattern: hi
- repl: "replacement text"
- append_if_not_found: True
- check_cmd:
- "djasjahj"
"""
with pytest.helpers.temp_file(
"file-replace-check-cmd.sls", sls_contents, state_tree
):
ret = modules.state.sls("file-replace-check-cmd")
for state_run in ret:
assert state_run.result is False
assert state_run.comment == "check_cmd determined the state failed"

View file

@ -0,0 +1,44 @@
import pytest
from saltfactories.utils import random_string
@pytest.fixture(scope="package")
def salt_master_factory(salt_factories):
factory = salt_factories.salt_master_daemon(
random_string("reauth-master-"),
extra_cli_arguments_after_first_start_failure=["--log-level=info"],
)
return factory
@pytest.fixture(scope="package")
def salt_master(salt_master_factory):
with salt_master_factory.started():
yield salt_master_factory
@pytest.fixture(scope="package")
def salt_minion_factory(salt_master):
factory = salt_master.salt_minion_daemon(
random_string("reauth-minion-"),
extra_cli_arguments_after_first_start_failure=["--log-level=info"],
)
return factory
@pytest.fixture(scope="package")
def salt_minion(salt_minion_factory):
with salt_minion_factory.started():
yield salt_minion_factory
@pytest.fixture(scope="package")
def salt_key_cli(salt_master):
assert salt_master.is_running()
return salt_master.salt_key_cli()
@pytest.fixture(scope="package")
def salt_cli(salt_master):
assert salt_master.is_running()
return salt_master.salt_cli()

View file

@ -0,0 +1,61 @@
import logging
import os
import threading
import time
import pytest
pytestmark = [
pytest.mark.slow_test,
pytest.mark.windows_whitelisted,
]
log = logging.getLogger(__name__)
def minion_func(salt_minion, event_listener, salt_master, timeout):
start = time.time()
with salt_minion.started(start_timeout=timeout * 2, max_start_attempts=1):
new_start = time.time()
while time.time() < new_start + (timeout * 2):
if event_listener.get_events(
[(salt_master.id, f"salt/job/*/ret/{salt_minion.id}")],
after_time=start,
):
break
time.sleep(5)
@pytest.fixture(scope="module")
def timeout():
return int(os.environ.get("SALT_CI_REAUTH_MASTER_WAIT", 150))
def test_reauth(salt_cli, salt_minion, salt_master, timeout, event_listener):
    """
    Stop both daemons, keep the master down past the minion's auth window,
    then verify the minion re-authenticates once the master returns.
    """
    # Make sure they can communicate
    assert salt_cli.run("test.ping", minion_tgt=salt_minion.id).data is True
    # Stop the master and minion
    salt_master.terminate()
    salt_minion.terminate()
    log.debug(
        "Master and minion stopped for reauth test, waiting for %s seconds", timeout
    )
    log.debug("Restarting the reauth minion")
    # We need to have the minion attempting to start concurrently (in a
    # background thread) while the master is still down and later restarted.
    minion_proc = threading.Thread(
        target=minion_func, args=(salt_minion, event_listener, salt_master, timeout)
    )
    minion_proc.start()
    # Keep the master down for the full wait period so the minion's initial
    # auth attempts have to be retried.
    time.sleep(timeout)
    log.debug("Restarting the reauth master")
    start = time.time()
    salt_master.start()
    # A successful re-auth is signaled by the minion's start event on the
    # master's event bus.
    event_listener.wait_for_events(
        [(salt_master.id, f"salt/minion/{salt_minion.id}/start")],
        after_time=start,
        timeout=timeout * 2,
    )
    assert salt_cli.run("test.ping", minion_tgt=salt_minion.id).data is True
    minion_proc.join()

View file

@ -241,6 +241,33 @@ def test_pkg_install_name():
assert "-e True -test_flag True" in str(mock_cmd_run_all.call_args[0])
def test_pkg_install_verify_ssl_false():
    """
    test pkg.install using verify_ssl=False
    """
    ret_reg = {"Nullsoft Install System": "3.03"}
    # The 2nd time it's run, pkg.list_pkgs uses with stringify
    se_list_pkgs = [{"nsis": ["3.03"]}, {"nsis": "3.02"}]
    # Mock for cp.cache_file; the URL and verify_ssl kwarg passed to it are
    # asserted below.
    mock_cp = MagicMock(return_value="C:\\fake\\path.exe")
    with patch.object(win_pkg, "list_pkgs", side_effect=se_list_pkgs), patch.object(
        win_pkg, "_get_reg_software", return_value=ret_reg
    ), patch.dict(
        win_pkg.__salt__, {"cp.is_cached": MagicMock(return_value=False)}
    ), patch.dict(
        win_pkg.__salt__, {"cp.cache_file": mock_cp}
    ), patch.dict(
        win_pkg.__salt__, {"cmd.run_all": MagicMock(return_value={"retcode": 0})}
    ):
        expected = {"nsis": {"new": "3.02", "old": "3.03"}}
        result = win_pkg.install(name="nsis", version="3.02", verify_ssl=False)
        # The installer download must forward verify_ssl=False to cp.cache_file.
        mock_cp.assert_called_once_with(
            "http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe",
            "base",
            verify_ssl=False,
        )
        assert expected == result
def test_pkg_install_single_pkg():
"""
test pkg.install pkg with extra_install_flags
@ -321,7 +348,7 @@ def test_pkg_install_log_message(caplog):
extra_install_flags="-e True -test_flag True",
)
assert (
'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /s /c "runme.exe" /s -e '
'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /c "runme.exe" /s -e '
"True -test_flag True"
).lower() in [x.lower() for x in caplog.messages]
assert "PKG : pwd: ".lower() in [x.lower() for x in caplog.messages]
@ -540,7 +567,7 @@ def test_pkg_remove_log_message(caplog):
pkgs=["firebox"],
)
assert (
'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /s /c "%program.exe" /S'
'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /c "%program.exe" /S'
).lower() in [x.lower() for x in caplog.messages]
assert "PKG : pwd: ".lower() in [x.lower() for x in caplog.messages]
assert "PKG : retcode: 0" in caplog.messages
@ -629,3 +656,17 @@ def test_pkg_remove_minion_error_salt():
)
assert ret == expected
@pytest.mark.parametrize(
    "v1,v2,expected",
    (
        ("2.24.0", "2.23.0.windows.1", 1),
        ("2.23.0.windows.2", "2.23.0.windows.1", 1),
    ),
)
def test__reverse_cmp_pkg_versions(v1, v2, expected):
    """Versions carrying a ``.windows.N`` suffix must still compare correctly."""
    result = win_pkg._reverse_cmp_pkg_versions(v1, v2)
    assert result == expected, f"cmp({v1}, {v2}) should be {expected}, got {result}"

View file

@ -0,0 +1,86 @@
"""
Test the win_task execution module
"""
from datetime import datetime
import pytest
import salt.modules.win_task as win_task
pytestmark = [
pytest.mark.skip_unless_on_windows,
pytest.mark.destructive_test,
]
@pytest.fixture(scope="function")
def base_task():
    """Create a disposable scheduled task and remove it after the test."""
    task_name = "SaltTest"
    created = win_task.create_task(
        task_name,
        user_name="System",
        force=True,
        action_type="Execute",
        cmd="c:\\salt\\salt-call.bat",
    )
    assert created is True
    yield task_name
    # Teardown: remove the task created above.
    deleted = win_task.delete_task(task_name)
    assert deleted is True
def test_repeat_interval(base_task):
    """A daily trigger's repeat duration/interval round-trip through info()."""
    added = win_task.add_trigger(
        base_task,
        trigger_type="Daily",
        trigger_enabled=True,
        repeat_duration="30 minutes",
        repeat_interval="30 minutes",
    )
    assert added is True
    trigger = win_task.info(base_task)["triggers"][0]
    assert trigger["enabled"] is True
    assert trigger["trigger_type"] == "Daily"
    assert trigger["repeat_duration"] == "30 minutes"
    assert trigger["repeat_interval"] == "30 minutes"
def test_repeat_interval_and_indefinitely(base_task):
    """An 'Indefinitely' repeat duration round-trips through info()."""
    added = win_task.add_trigger(
        base_task,
        trigger_type="Daily",
        trigger_enabled=True,
        repeat_duration="Indefinitely",
        repeat_interval="30 minutes",
    )
    assert added is True
    trigger = win_task.info(base_task)["triggers"][0]
    assert trigger["enabled"] is True
    assert trigger["trigger_type"] == "Daily"
    assert trigger["repeat_duration"] == "Indefinitely"
    assert trigger["repeat_interval"] == "30 minutes"
def test_edit_task_delete_after(base_task):
    """delete_after can be set via edit_task and cleared again with False."""
    added = win_task.add_trigger(
        base_task,
        trigger_type="Daily",
        trigger_enabled=True,
        end_date=datetime.today().strftime("%Y-%m-%d"),
        end_time="23:59:59",
    )
    assert added is True
    # Set delete_after and confirm it is reported back.
    assert win_task.edit_task(base_task, delete_after="30 days") is True
    info = win_task.info(base_task)
    assert info["settings"]["delete_after"] == "30 days"
    # Clear it again and confirm the setting is disabled.
    assert win_task.edit_task(base_task, delete_after=False) is True
    info = win_task.info(base_task)
    assert info["settings"]["delete_after"] is False

View file

@ -77,6 +77,8 @@ def test_cmp_strict(v1, v2, wanted):
# Added by us
("3.10.0-514.el7", "3.10.0-514.6.1.el7", 1),
("2.2.2", "2.12.1", -1),
("2.24.0", "2.23.0.windows.1", 1),
("2.23.0.windows.2", "2.23.0.windows.1", 1),
),
)
def test_cmp(v1, v2, wanted):

View file

@ -1,56 +0,0 @@
import pytest
import salt.modules.win_task as win_task
from tests.support.unit import TestCase
@pytest.mark.skip_unless_on_windows
@pytest.mark.destructive_test
class WinTaskTestCase(TestCase):
    """
    Test cases for salt.modules.win_task
    """

    def _create_check_delete(self, task_name, **trigger_kwargs):
        # Create a daily-trigger task with the given repeat settings, verify
        # the trigger type via info(), and always delete the task afterwards.
        try:
            ret = win_task.create_task(
                task_name,
                user_name="System",
                force=True,
                action_type="Execute",
                cmd="c:\\salt\\salt-call.bat",
                trigger_type="Daily",
                trigger_enabled=True,
                **trigger_kwargs,
            )
            self.assertTrue(ret)
            ret = win_task.info(task_name)
            self.assertEqual(ret["triggers"][0]["trigger_type"], "Daily")
        finally:
            ret = win_task.delete_task(task_name)
            self.assertTrue(ret)

    def test_repeat_interval(self):
        self._create_check_delete(
            "SaltTest1",
            repeat_duration="30 minutes",
            repeat_interval="30 minutes",
        )

    def test_repeat_interval_and_indefinitely(self):
        self._create_check_delete(
            "SaltTest2",
            repeat_duration="Indefinitely",
            repeat_interval="30 minutes",
        )

View file

@ -363,12 +363,6 @@ def define_jobs(
wfh.write("Selected Jobs:\n")
for name, value in sorted(jobs.items()):
wfh.write(f" - {name}: {value}\n")
wfh.write(
"\n<details>\n<summary>All Changed Files (click me)</summary>\n<pre>\n"
)
for path in sorted(json.loads(changed_files_contents["repo_files"])):
wfh.write(f"{path}\n")
wfh.write("</pre>\n</details>\n")
ctx.info("Writing 'jobs' to the github outputs file")
with open(github_output, "a", encoding="utf-8") as wfh:

View file

@ -10,6 +10,7 @@ import json
import logging
import os
import pathlib
import re
import shutil
import sys
import tempfile
@ -124,9 +125,6 @@ _deb_distro_info = {
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def debian(
@ -139,7 +137,6 @@ def debian(
key_id: str = None,
distro_arch: str = "amd64",
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the debian repository.
@ -206,7 +203,6 @@ def debian(
distro,
distro_version=distro_version,
distro_arch=distro_arch,
rc_build=rc_build,
nightly_build=nightly_build,
)
@ -370,9 +366,6 @@ _rpm_distro_info = {
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def rpm(
@ -385,7 +378,6 @@ def rpm(
key_id: str = None,
distro_arch: str = "amd64",
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the redhat repository.
@ -413,7 +405,6 @@ def rpm(
distro,
distro_version=distro_version,
distro_arch=distro_arch,
rc_build=rc_build,
nightly_build=nightly_build,
)
@ -461,18 +452,40 @@ def rpm(
else:
ctx.run("createrepo", ".", cwd=create_repo_path)
if nightly_build:
repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io")
else:
repo_domain = os.environ.get(
"SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io"
)
salt_repo_user = os.environ.get("SALT_REPO_USER")
if salt_repo_user:
log.info(
"SALT_REPO_USER: %s",
salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1],
)
salt_repo_pass = os.environ.get("SALT_REPO_PASS")
if salt_repo_pass:
log.info(
"SALT_REPO_PASS: %s",
salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1],
)
if salt_repo_user and salt_repo_pass:
repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}"
def _create_repo_file(create_repo_path, url_suffix):
ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...")
if nightly_build:
base_url = "salt-dev/"
repo_file_contents = "[salt-nightly-repo]"
elif rc_build:
elif "rc" in salt_version:
base_url = "salt_rc/"
repo_file_contents = "[salt-rc-repo]"
else:
base_url = ""
repo_file_contents = "[salt-repo]"
base_url += f"salt/py3/{distro}/{url_suffix}"
base_url += f"salt/py3/{distro}/{distro_version}/{distro_arch}/{url_suffix}"
if distro == "amazon":
distro_name = "Amazon Linux"
elif distro == "redhat":
@ -488,13 +501,13 @@ def rpm(
repo_file_contents += textwrap.dedent(
f"""
name=Salt repo for {distro_name} {distro_version} PY3
baseurl=https://repo.saltproject.io/{base_url}
baseurl=https://{repo_domain}/{base_url}
skip_if_unavailable=True{failovermethod}
priority=10
enabled=1
enabled_metadata=1
gpgcheck=1
gpgkey={base_url}/{tools.utils.GPG_KEY_FILENAME}.pub
gpgkey=https://{repo_domain}/{base_url}/{tools.utils.GPG_KEY_FILENAME}.pub
"""
)
create_repo_path.write_text(repo_file_contents)
@ -504,9 +517,9 @@ def rpm(
else:
repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo"
_create_repo_file(repo_file_path, salt_version)
_create_repo_file(repo_file_path, f"minor/{salt_version}")
if nightly_build is False and rc_build is False:
if nightly_build is False:
remote_versions = _get_remote_versions(
tools.utils.STAGING_BUCKET_NAME,
create_repo_path.parent.relative_to(repo_path),
@ -564,9 +577,6 @@ def rpm(
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def windows(
@ -576,7 +586,6 @@ def windows(
repo_path: pathlib.Path = None,
key_id: str = None,
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the windows repository.
@ -590,7 +599,6 @@ def windows(
ctx,
salt_version=salt_version,
nightly_build=nightly_build,
rc_build=rc_build,
repo_path=repo_path,
incoming=incoming,
key_id=key_id,
@ -625,9 +633,6 @@ def windows(
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def macos(
@ -637,7 +642,6 @@ def macos(
repo_path: pathlib.Path = None,
key_id: str = None,
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the windows repository.
@ -651,7 +655,6 @@ def macos(
ctx,
salt_version=salt_version,
nightly_build=nightly_build,
rc_build=rc_build,
repo_path=repo_path,
incoming=incoming,
key_id=key_id,
@ -686,9 +689,6 @@ def macos(
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def onedir(
@ -698,7 +698,6 @@ def onedir(
repo_path: pathlib.Path = None,
key_id: str = None,
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the onedir repository.
@ -712,7 +711,6 @@ def onedir(
ctx,
salt_version=salt_version,
nightly_build=nightly_build,
rc_build=rc_build,
repo_path=repo_path,
incoming=incoming,
key_id=key_id,
@ -747,9 +745,6 @@ def onedir(
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def src(
@ -759,7 +754,6 @@ def src(
repo_path: pathlib.Path = None,
key_id: str = None,
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the onedir repository.
@ -839,91 +833,64 @@ def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None):
_publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version)
@repo.command(
name="backup-previous-releases",
arguments={
"salt_version": {
"help": "The salt version for which to build the repository",
"required": True,
},
},
)
def backup_previous_releases(ctx: Context, salt_version: str = None):
@repo.command(name="backup-previous-releases")
def backup_previous_releases(ctx: Context):
"""
Backup previous releases.
Backup release bucket.
"""
s3 = boto3.client("s3")
backup_file_relpath = f"release-artifacts/{salt_version}/.release-backup-done"
try:
ctx.info(
f"Getting information if a backup for {salt_version} was already done..."
)
s3.head_object(
Key=backup_file_relpath,
Bucket=tools.utils.STAGING_BUCKET_NAME,
)
ctx.info(f"A backup prior to releasing {salt_version} has already been done.")
ctx.exit(0)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
files_in_backup: dict[str, datetime] = {}
files_to_backup: list[tuple[str, datetime]] = []
ctx.info("Grabbing remote listing of files in backup ...")
for entry in _get_repo_detailed_file_list(
bucket_name=tools.utils.BACKUP_BUCKET_NAME,
):
files_in_backup[entry["Key"]] = entry["LastModified"]
ctx.info("Grabbing remote listing of files to backup ...")
for entry in _get_repo_detailed_file_list(
bucket_name=tools.utils.RELEASE_BUCKET_NAME,
):
files_to_backup.append((entry["Key"], entry["LastModified"]))
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
"Back up previous releases", total=len(files_to_backup)
)
for fpath, last_modified in files_to_backup:
try:
last_modified_backup = files_in_backup.get(fpath)
if last_modified_backup and last_modified_backup >= last_modified:
ctx.info(f" * Skipping unmodified {fpath}")
continue
ctx.info(f" * Backup {fpath}")
s3.copy_object(
Bucket=tools.utils.BACKUP_BUCKET_NAME,
Key=fpath,
CopySource={
"Bucket": tools.utils.RELEASE_BUCKET_NAME,
"Key": fpath,
},
MetadataDirective="COPY",
TaggingDirective="COPY",
ServerSideEncryption="AES256",
)
except ClientError as exc:
if "PreconditionFailed" not in str(exc):
log.exception(f"Failed to copy {fpath}")
finally:
progress.update(task, advance=1)
s3.put_object(
Key=backup_file_relpath,
Bucket=tools.utils.STAGING_BUCKET_NAME,
Body=b"",
Metadata={
"x-amz-meta-salt-release-version": salt_version,
},
)
_rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME)
ctx.info("Done")
@repo.command(name="restore-previous-releases")
def restore_previous_releases(ctx: Context):
    """
    Restore release bucket from backup.
    """
    # Reverse direction of ``backup-previous-releases``: sync the backup
    # bucket's contents back over the release bucket.
    _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME)
    ctx.info("Done")
def _rclone(ctx: Context, src: str, dst: str):
    """
    Sync the ``src`` S3 bucket into the ``dst`` S3 bucket with ``rclone sync``.

    Exits the context with status 1 if the rclone binary cannot be found or
    the sync returns a non-zero exit code.
    """
    rclone = shutil.which("rclone")
    if not rclone:
        ctx.error("Could not find the rclone binary")
        ctx.exit(1)
    if TYPE_CHECKING:
        # ctx.exit() terminates above, but the type checker cannot infer that;
        # this assert narrows ``rclone`` to a non-None str for analysis only.
        assert rclone

    # rclone picks up the S3 backend from RCLONE_CONFIG_S3_* environment vars.
    env = os.environ.copy()
    env["RCLONE_CONFIG_S3_TYPE"] = "s3"
    cmdline: list[str] = [
        rclone,
        "sync",
        "--auto-confirm",
        "--human-readable",
        "--checksum",
        "--color=always",
        "--metadata",
        "--s3-env-auth",
        "--s3-location-constraint=us-west-2",
        "--s3-provider=AWS",
        "--s3-region=us-west-2",
        "--stats-file-name-length=0",
        "--stats-one-line",
        "--stats=5s",
        "--transfers=50",
        "--fast-list",
        "--verbose",
    ]
    if src == tools.utils.RELEASE_BUCKET_NAME:
        # Only syncs sourced from the release bucket use intelligent tiering.
        cmdline.append("--s3-storage-class=INTELLIGENT_TIERING")
    cmdline.extend([f"s3://{src}", f"s3://{dst}"])
    ctx.info(f"Running: {' '.join(cmdline)}")
    ret = ctx.run(*cmdline, env=env, check=False)
    if ret.returncode:
        ctx.error(f"Failed to sync from s3://{src} to s3://{dst}")
        ctx.exit(1)
@publish.command(
arguments={
"salt_version": {
@ -1030,6 +997,7 @@ def release(ctx: Context, salt_version: str):
already_copied_files: list[str] = []
s3 = boto3.client("s3")
dot_repo_files = []
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
"Copying files between buckets", total=len(files_to_copy)
@ -1037,6 +1005,8 @@ def release(ctx: Context, salt_version: str):
for fpath in files_to_copy:
if fpath in already_copied_files:
continue
if fpath.endswith(".repo"):
dot_repo_files.append(fpath)
ctx.info(f" * Copying {fpath}")
try:
s3.copy_object(
@ -1065,7 +1035,7 @@ def release(ctx: Context, salt_version: str):
create_repo_path = _create_repo_path(
repo_path,
salt_version,
distro,
distro=distro,
)
repo_json_path = create_repo_path.parent.parent / "repo.json"
@ -1133,6 +1103,51 @@ def release(ctx: Context, salt_version: str):
ctx.info(f"Writing {repo_json_path} ...")
repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True))
# And now, let's get the several rpm "*.repo" files to update the base
# domain from staging to release
release_domain = os.environ.get(
"SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io"
)
for path in dot_repo_files:
repo_file_path = repo_path.joinpath(path)
repo_file_path.parent.mkdir(exist_ok=True, parents=True)
bucket_name = tools.utils.STAGING_BUCKET_NAME
try:
ret = s3.head_object(Bucket=bucket_name, Key=path)
ctx.info(
f"Downloading existing '{repo_file_path.relative_to(repo_path)}' "
f"file from bucket {bucket_name}"
)
size = ret["ContentLength"]
with repo_file_path.open("wb") as wfh:
with tools.utils.create_progress_bar(
file_progress=True
) as progress:
task = progress.add_task(
description="Downloading...", total=size
)
s3.download_fileobj(
Bucket=bucket_name,
Key=path,
Fileobj=wfh,
Callback=tools.utils.UpdateProgress(progress, task),
)
updated_contents = re.sub(
r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$",
rf"\1=https://{release_domain}/\3",
repo_file_path.read_text(),
flags=re.MULTILINE,
)
ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:")
ctx.print(updated_contents)
repo_file_path.write_text(updated_contents)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
ctx.info(f"Cloud not find {repo_file_path} in bucket {bucket_name}")
for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True):
for path in filenames:
upload_path = pathlib.Path(dirpath, path)
@ -1424,7 +1439,6 @@ def _create_onedir_based_repo(
ctx: Context,
salt_version: str,
nightly_build: bool,
rc_build: bool,
repo_path: pathlib.Path,
incoming: pathlib.Path,
key_id: str,
@ -1433,7 +1447,7 @@ def _create_onedir_based_repo(
):
ctx.info("Creating repository directory structure ...")
create_repo_path = _create_repo_path(
repo_path, salt_version, distro, rc_build=rc_build, nightly_build=nightly_build
repo_path, salt_version, distro, nightly_build=nightly_build
)
if nightly_build is False:
repo_json_path = create_repo_path.parent.parent / "repo.json"
@ -1542,7 +1556,8 @@ def _create_onedir_based_repo(
)
minor_versions = [v for v in versions if v.major == major_version]
ctx.info(
f"Collected versions(Matching major: {major_version}) from {minor_repo_json_path.relative_to(repo_path)}: "
f"Collected versions(Matching major: {major_version}) from "
f"{minor_repo_json_path.relative_to(repo_path)}: "
f"{', '.join(str(vs) for vs in minor_versions)}"
)
if not versions:
@ -1576,7 +1591,7 @@ def _create_onedir_based_repo(
minor_repo_json["latest"] = release_json
# This is the latest minor, update the major in the top level repo.json
# to this version
repo_json[str(salt_version)] = release_json
repo_json[str(major_version)] = release_json
ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...")
if major_link.exists():
major_link.unlink()
@ -1607,7 +1622,8 @@ def _get_repo_json_file_contents(
Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path))
)
ctx.info(
f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file from bucket {bucket_name}"
f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file "
f"from bucket {bucket_name}"
)
size = ret["ContentLength"]
with repo_json_path.open("wb") as wfh:
@ -1626,6 +1642,9 @@ def _get_repo_json_file_contents(
raise
if exc.response["Error"]["Code"] != "404":
raise
ctx.info(f"Cloud not find {repo_json_path} in bucket {bucket_name}")
if repo_json:
ctx.print(repo_json, soft_wrap=True)
return repo_json
@ -1774,13 +1793,12 @@ def _create_repo_path(
distro: str,
distro_version: str | None = None, # pylint: disable=bad-whitespace
distro_arch: str | None = None, # pylint: disable=bad-whitespace
rc_build: bool = False,
nightly_build: bool = False,
):
create_repo_path = repo_path
if nightly_build:
create_repo_path = create_repo_path / "salt-dev"
elif rc_build:
elif "rc" in salt_version:
create_repo_path = create_repo_path / "salt_rc"
create_repo_path = create_repo_path / "salt" / "py3" / distro
if distro_version:

View file

@ -63,7 +63,7 @@ def generate_workflows(ctx: Context):
"slug": "staging",
"template": "staging.yml",
"includes": {
"test-pkg-downloads": False,
"test-pkg-downloads": True,
},
},
"Scheduled": {
@ -76,7 +76,7 @@ def generate_workflows(ctx: Context):
"lint": False,
"pkg-tests": False,
"salt-tests": False,
"test-pkg-downloads": False,
"test-pkg-downloads": True,
},
},
}

View file

@ -272,6 +272,17 @@ def rsync(ctx: Context, name: str):
"--skip-code-coverage",
],
},
"envvars": {
"action": "append",
"flags": [
"-E",
"--env",
],
"help": (
"Environment variable name to forward when running tests. Example: "
"'-E VAR1 -E VAR2'."
),
},
}
)
def test(
@ -284,6 +295,7 @@ def test(
print_tests_selection: bool = False,
print_system_info: bool = False,
skip_code_coverage: bool = False,
envvars: list[str] = None,
):
"""
Run test in the VM.
@ -318,6 +330,12 @@ def test(
if "photonos" in name:
skip_known_failures = os.environ.get("SKIP_INITIAL_PHOTONOS_FAILURES", "1")
env["SKIP_INITIAL_PHOTONOS_FAILURES"] = skip_known_failures
if envvars:
for key in envvars:
if key not in os.environ:
ctx.warn(f"Environment variable {key!r} not set. Not forwarding")
continue
env[key] = os.environ[key]
returncode = vm.run_nox(
nox_session=nox_session,
session_args=nox_session_args,
@ -352,6 +370,17 @@ def test(
"--skip-requirements-install",
],
},
"envvars": {
"action": "append",
"flags": [
"-E",
"--env",
],
"help": (
"Environment variable name to forward when running tests. Example: "
"'-E VAR1 -E VAR2'."
),
},
}
)
def testplan(
@ -360,6 +389,7 @@ def testplan(
nox_session_args: list[str] = None,
nox_session: str = "ci-test-3",
skip_requirements_install: bool = False,
envvars: list[str] = None,
):
"""
Run test in the VM.
@ -379,6 +409,12 @@ def testplan(
if "photonos" in name:
skip_known_failures = os.environ.get("SKIP_INITIAL_PHOTONOS_FAILURES", "1")
env["SKIP_INITIAL_PHOTONOS_FAILURES"] = skip_known_failures
if envvars:
for key in envvars:
if key not in os.environ:
ctx.warn(f"Environment variable {key!r} not set. Not forwarding")
continue
env[key] = os.environ[key]
returncode = vm.run_nox(
nox_session=nox_session,
session_args=nox_session_args,
@ -1199,7 +1235,10 @@ class VM:
"""
Decompress nox.<vm-name>.tar.* if it exists in the VM
"""
return self.run_nox("decompress-dependencies", session_args=[self.name])
env = {"DELETE_NOX_ARCHIVE": "1"}
return self.run_nox(
"decompress-dependencies", session_args=[self.name], env=env
)
def download_dependencies(self):
"""