mirror of
https://github.com/saltstack/salt.git
synced 2025-04-10 23:01:39 +00:00
Merge branch '3006.x' into merge/3007.x/3006.x
This commit is contained in:
commit
9f1647d0f6
144 changed files with 4676 additions and 12093 deletions
3
.github/ISSUE_TEMPLATE/config.yml
vendored
3
.github/ISSUE_TEMPLATE/config.yml
vendored
|
@ -6,6 +6,3 @@ contact_links:
|
||||||
- name: Salt-Users Forum
|
- name: Salt-Users Forum
|
||||||
url: https://groups.google.com/forum/#!forum/salt-users
|
url: https://groups.google.com/forum/#!forum/salt-users
|
||||||
about: Please ask and answer questions here.
|
about: Please ask and answer questions here.
|
||||||
- name: Salt on LiberaChat
|
|
||||||
url: https://web.libera.chat/#salt
|
|
||||||
about: Please ask and answer questions here.
|
|
||||||
|
|
2
.github/ISSUE_TEMPLATE/tech-debt.md
vendored
2
.github/ISSUE_TEMPLATE/tech-debt.md
vendored
|
@ -8,7 +8,7 @@ assignees: ''
|
||||||
---
|
---
|
||||||
|
|
||||||
### Description of the tech debt to be addressed, include links and screenshots
|
### Description of the tech debt to be addressed, include links and screenshots
|
||||||
<!-- Note: Please direct questions to the salt-users google group, IRC or Community Discord. -->
|
<!-- Note: Please direct questions to the salt-users google group, GitHub Discussions or Community Discord. -->
|
||||||
|
|
||||||
### Versions Report
|
### Versions Report
|
||||||
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)
|
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)
|
||||||
|
|
13
.github/actionlint.yaml
vendored
13
.github/actionlint.yaml
vendored
|
@ -1,14 +1,5 @@
|
||||||
self-hosted-runner:
|
self-hosted-runner:
|
||||||
# Labels of self-hosted runner in array of string
|
# Labels of self-hosted runner in array of string
|
||||||
labels:
|
labels:
|
||||||
- bastion
|
- linux-x86_64
|
||||||
- x86_64
|
- linux-arm64
|
||||||
- arm64
|
|
||||||
- aarch64
|
|
||||||
- amd64
|
|
||||||
- repo-nightly
|
|
||||||
- repo-staging
|
|
||||||
- repo-release
|
|
||||||
- medium
|
|
||||||
- large
|
|
||||||
- macos-13-xlarge
|
|
||||||
|
|
7
.github/actions/cache/action.yml
vendored
7
.github/actions/cache/action.yml
vendored
|
@ -26,10 +26,6 @@ inputs:
|
||||||
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
|
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
|
||||||
default: 'false'
|
default: 'false'
|
||||||
required: false
|
required: false
|
||||||
save-always:
|
|
||||||
description: 'Run the post step to save the cache even if another step before fails'
|
|
||||||
default: 'false'
|
|
||||||
required: false
|
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
cache-hit:
|
cache-hit:
|
||||||
|
@ -49,7 +45,6 @@ runs:
|
||||||
echo "GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE=${{ inputs.enableCrossOsArchive }}" | tee -a "${GITHUB_ENV}"
|
echo "GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE=${{ inputs.enableCrossOsArchive }}" | tee -a "${GITHUB_ENV}"
|
||||||
echo "GHA_CACHE_FAIL_ON_CACHE_MISS=${{ inputs.fail-on-cache-miss }}" | tee -a "${GITHUB_ENV}"
|
echo "GHA_CACHE_FAIL_ON_CACHE_MISS=${{ inputs.fail-on-cache-miss }}" | tee -a "${GITHUB_ENV}"
|
||||||
echo "GHA_CACHE_LOOKUP_ONLY=${{ inputs.lookup-only }}" | tee -a "${GITHUB_ENV}"
|
echo "GHA_CACHE_LOOKUP_ONLY=${{ inputs.lookup-only }}" | tee -a "${GITHUB_ENV}"
|
||||||
echo "GHA_CACHE_SAVE_ALWAYS=${{ inputs.save-always }}" | tee -a "${GITHUB_ENV}"
|
|
||||||
echo "GHA_CACHE_RESTORE_KEYS=${{ inputs.restore-keys }}" | tee -a "${GITHUB_ENV}"
|
echo "GHA_CACHE_RESTORE_KEYS=${{ inputs.restore-keys }}" | tee -a "${GITHUB_ENV}"
|
||||||
echo "GHA_CACHE_UPLOAD_CHUNK_SIZE=${{ inputs.upload-chunk-size }}" | tee -a "${GITHUB_ENV}"
|
echo "GHA_CACHE_UPLOAD_CHUNK_SIZE=${{ inputs.upload-chunk-size }}" | tee -a "${GITHUB_ENV}"
|
||||||
|
|
||||||
|
@ -63,7 +58,6 @@ runs:
|
||||||
enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
|
enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
|
||||||
fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
|
fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
|
||||||
lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
|
lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
|
||||||
save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }}
|
|
||||||
restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
|
restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
|
||||||
upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
|
upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
|
||||||
|
|
||||||
|
@ -97,7 +91,6 @@ runs:
|
||||||
enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
|
enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
|
||||||
fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
|
fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
|
||||||
lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
|
lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
|
||||||
save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }}
|
|
||||||
restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
|
restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
|
||||||
upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
|
upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
|
||||||
|
|
||||||
|
|
|
@ -54,10 +54,13 @@ runs:
|
||||||
working-directory: ${{ inputs.cwd }}
|
working-directory: ${{ inputs.cwd }}
|
||||||
run: |
|
run: |
|
||||||
PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }}
|
PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }}
|
||||||
|
${PYTHON_EXE} -m ensurepip --upgrade
|
||||||
(${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1
|
(${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1
|
||||||
if [ $exitcode -eq 0 ]; then
|
if [ $exitcode -eq 0 ]; then
|
||||||
|
${PYTHON_EXE} -m pip install --break-system-packages --upgrade setuptools
|
||||||
${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
|
${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
|
||||||
else
|
else
|
||||||
|
${PYTHON_EXE} -m pip install --upgrade setuptools
|
||||||
${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
|
${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|
12
.github/config.yml
vendored
12
.github/config.yml
vendored
|
@ -11,18 +11,16 @@ newIssueWelcomeComment: >
|
||||||
Also, check out some of our community
|
Also, check out some of our community
|
||||||
resources including:
|
resources including:
|
||||||
|
|
||||||
- [Community Wiki](https://github.com/saltstack/community/wiki)
|
|
||||||
- [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
|
- [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
|
||||||
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
|
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
|
||||||
- [IRC on LiberaChat](https://web.libera.chat/#salt)
|
|
||||||
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
|
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
|
||||||
- [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss)
|
- [Community Wiki](https://github.com/saltstack/community/wiki)
|
||||||
|
|
||||||
There are lots of ways to get involved in our community. Every month, there are around a dozen
|
There are lots of ways to get involved in our community. Every month, there are around a dozen
|
||||||
opportunities to meet with other contributors and the Salt Core team and collaborate in real
|
opportunities to meet with other contributors and the Salt Core team and collaborate in real
|
||||||
time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
|
time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
|
||||||
|
|
||||||
If you have additional questions, email us at saltproject@vmware.com. We’re glad
|
If you have additional questions, email us at saltproject.pdl@broadcom.com. We’re glad
|
||||||
you’ve joined our community and look forward to doing awesome things with
|
you’ve joined our community and look forward to doing awesome things with
|
||||||
you!
|
you!
|
||||||
|
|
||||||
|
@ -37,18 +35,16 @@ newPRWelcomeComment: >
|
||||||
Also, check out some of our community
|
Also, check out some of our community
|
||||||
resources including:
|
resources including:
|
||||||
|
|
||||||
- [Community Wiki](https://github.com/saltstack/community/wiki)
|
|
||||||
- [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
|
- [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
|
||||||
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
|
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
|
||||||
- [IRC on LiberaChat](https://web.libera.chat/#salt)
|
|
||||||
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
|
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
|
||||||
- [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss)
|
- [Community Wiki](https://github.com/saltstack/community/wiki)
|
||||||
|
|
||||||
There are lots of ways to get involved in our community. Every month, there are around a dozen
|
There are lots of ways to get involved in our community. Every month, there are around a dozen
|
||||||
opportunities to meet with other contributors and the Salt Core team and collaborate in real
|
opportunities to meet with other contributors and the Salt Core team and collaborate in real
|
||||||
time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
|
time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
|
||||||
|
|
||||||
If you have additional questions, email us at saltproject@vmware.com. We’re glad
|
If you have additional questions, email us at saltproject.pdl@broadcom.com. We’re glad
|
||||||
you’ve joined our community and look forward to doing awesome things with
|
you’ve joined our community and look forward to doing awesome things with
|
||||||
you!
|
you!
|
||||||
|
|
||||||
|
|
171
.github/workflows/build-deps-ci-action.yml
vendored
171
.github/workflows/build-deps-ci-action.yml
vendored
|
@ -34,6 +34,14 @@ on:
|
||||||
type: string
|
type: string
|
||||||
description: The onedir package name to use
|
description: The onedir package name to use
|
||||||
default: salt
|
default: salt
|
||||||
|
matrix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
linux_arm_runner:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
|
||||||
|
|
||||||
env:
|
env:
|
||||||
|
@ -48,54 +56,22 @@ env:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Generate Matrix
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
- name: Generate Test Matrix
|
|
||||||
id: generate-matrix
|
|
||||||
run: |
|
|
||||||
tools ci deps-matrix
|
|
||||||
|
|
||||||
|
|
||||||
linux-dependencies:
|
linux-dependencies:
|
||||||
name: Linux
|
name: Linux
|
||||||
needs:
|
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
|
||||||
- generate-matrix
|
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'false'
|
||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['linux'] }}
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
steps:
|
steps:
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.10'
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
- name: "Throttle Builds"
|
||||||
shell: bash
|
shell: bash
|
||||||
|
@ -105,6 +81,10 @@ jobs:
|
||||||
- name: Checkout Source Code
|
- name: Checkout Source Code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.10'
|
||||||
|
|
||||||
- name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }}
|
- name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }}
|
||||||
id: nox-dependencies-cache
|
id: nox-dependencies-cache
|
||||||
uses: ./.github/actions/cache
|
uses: ./.github/actions/cache
|
||||||
|
@ -138,53 +118,34 @@ jobs:
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
|
cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
- name: Install System Dependencies
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
echo true
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Start VM
|
- name: Install Nox
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
id: spin-up-vm
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: List Free Space
|
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
|
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
||||||
|
|
||||||
- name: Upload Checkout To VM
|
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm rsync ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: Install Dependencies
|
- name: Install Dependencies
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
|
env:
|
||||||
|
PRINT_TEST_SELECTION: "0"
|
||||||
|
PRINT_SYSTEM_INFO: "0"
|
||||||
|
RELENV_BUILDENV: "1"
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }}
|
nox --install-only -e ${{ inputs.nox-session }}
|
||||||
|
|
||||||
- name: Cleanup .nox Directory
|
- name: Cleanup .nox Directory
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }}
|
nox --force-color -e "pre-archive-cleanup(pkg=False)"
|
||||||
|
|
||||||
- name: Compress .nox Directory
|
- name: Compress .nox Directory
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }}
|
nox --force-color -e compress-dependencies -- linux ${{ matrix.arch }}
|
||||||
|
|
||||||
- name: Download Compressed .nox Directory
|
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm download-dependencies ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: Destroy VM
|
|
||||||
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: Upload Nox Requirements Tarball
|
- name: Upload Nox Requirements Tarball
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
|
@ -194,14 +155,13 @@ jobs:
|
||||||
|
|
||||||
macos-dependencies:
|
macos-dependencies:
|
||||||
name: MacOS
|
name: MacOS
|
||||||
needs:
|
runs-on: ${{ matrix.arch == 'x86_64' && 'macos-13' || 'macos-14' }}
|
||||||
- generate-matrix
|
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
|
||||||
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
|
|
||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }}
|
include: ${{ fromJSON(inputs.matrix)['macos'] }}
|
||||||
env:
|
env:
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
steps:
|
steps:
|
||||||
|
@ -280,21 +240,19 @@ jobs:
|
||||||
name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
|
name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
|
||||||
path: nox.macos.${{ matrix.arch }}.tar.*
|
path: nox.macos.${{ matrix.arch }}.tar.*
|
||||||
|
|
||||||
|
|
||||||
windows-dependencies:
|
windows-dependencies:
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
name: Windows
|
name: Windows
|
||||||
runs-on:
|
runs-on: windows-latest
|
||||||
- self-hosted
|
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'false'
|
||||||
|
GITHUB_WORKSPACE: 'C:\Windows\Temp\testing'
|
||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['windows'] }}
|
include: ${{ fromJSON(inputs.matrix)['windows'] }}
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
- name: "Throttle Builds"
|
||||||
|
@ -302,6 +260,10 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
||||||
|
|
||||||
|
- name: "Show environment"
|
||||||
|
run: |
|
||||||
|
env
|
||||||
|
|
||||||
- name: Checkout Source Code
|
- name: Checkout Source Code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
@ -327,10 +289,11 @@ jobs:
|
||||||
cd artifacts
|
cd artifacts
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
|
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
|
||||||
|
|
||||||
- name: PyPi Proxy
|
- name: Set up Python ${{ inputs.python-version }}
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
uses: actions/setup-python@v5
|
||||||
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
|
with:
|
||||||
|
python-version: "${{ inputs.python-version }}"
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
- name: Setup Python Tools Scripts
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
|
@ -338,53 +301,33 @@ jobs:
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
|
cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
- name: Install System Dependencies
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
echo true
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Start VM
|
- name: Install Nox
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
id: spin-up-vm
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: List Free Space
|
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true
|
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
||||||
|
|
||||||
- name: Upload Checkout To VM
|
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm rsync ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: Install Dependencies
|
- name: Install Dependencies
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
|
env:
|
||||||
|
PRINT_TEST_SELECTION: "0"
|
||||||
|
PRINT_SYSTEM_INFO: "0"
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }}
|
nox --install-only -e ${{ inputs.nox-session }}
|
||||||
|
|
||||||
- name: Cleanup .nox Directory
|
- name: Cleanup .nox Directory
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }}
|
nox --force-color -e "pre-archive-cleanup(pkg=False)"
|
||||||
|
|
||||||
- name: Compress .nox Directory
|
- name: Compress .nox Directory
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }}
|
nox --force-color -e compress-dependencies -- windows ${{ matrix.arch }}
|
||||||
|
|
||||||
- name: Download Compressed .nox Directory
|
|
||||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm download-dependencies ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: Destroy VM
|
|
||||||
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }}
|
|
||||||
|
|
||||||
- name: Upload Nox Requirements Tarball
|
- name: Upload Nox Requirements Tarball
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
|
|
49
.github/workflows/build-deps-onedir.yml
vendored
49
.github/workflows/build-deps-onedir.yml
vendored
|
@ -8,12 +8,6 @@ on:
|
||||||
type: string
|
type: string
|
||||||
required: true
|
required: true
|
||||||
description: The Salt version to set prior to building packages.
|
description: The Salt version to set prior to building packages.
|
||||||
github-hosted-runners:
|
|
||||||
type: boolean
|
|
||||||
required: true
|
|
||||||
self-hosted-runners:
|
|
||||||
type: boolean
|
|
||||||
required: true
|
|
||||||
cache-seed:
|
cache-seed:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
|
@ -26,6 +20,14 @@ on:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
description: The version of python to use with relenv
|
description: The version of python to use with relenv
|
||||||
|
matrix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
linux_arm_runner:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
|
||||||
env:
|
env:
|
||||||
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
||||||
|
@ -41,20 +43,15 @@ jobs:
|
||||||
|
|
||||||
build-deps-linux:
|
build-deps-linux:
|
||||||
name: Linux
|
name: Linux
|
||||||
if: ${{ inputs.self-hosted-runners }}
|
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
|
||||||
|
runs-on:
|
||||||
|
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
arch:
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
- x86_64
|
|
||||||
- arm64
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- medium
|
|
||||||
- ${{ matrix.arch }}
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'false'
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
- name: "Throttle Builds"
|
||||||
|
@ -64,6 +61,10 @@ jobs:
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.10'
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
- name: Setup Python Tools Scripts
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
with:
|
with:
|
||||||
|
@ -89,19 +90,23 @@ jobs:
|
||||||
|
|
||||||
build-deps-macos:
|
build-deps-macos:
|
||||||
name: macOS
|
name: macOS
|
||||||
if: ${{ inputs.github-hosted-runners }}
|
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 2
|
max-parallel: 2
|
||||||
matrix:
|
matrix:
|
||||||
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
|
include: ${{ fromJSON(inputs.matrix)['macos'] }}
|
||||||
runs-on:
|
runs-on:
|
||||||
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
|
- ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'false'
|
USE_S3_CACHE: 'false'
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
|
- name: "Check cores"
|
||||||
|
shell: bash
|
||||||
|
run: sysctl -n hw.ncpu
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
- name: "Throttle Builds"
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
@ -139,14 +144,12 @@ jobs:
|
||||||
|
|
||||||
build-deps-windows:
|
build-deps-windows:
|
||||||
name: Windows
|
name: Windows
|
||||||
if: ${{ inputs.github-hosted-runners }}
|
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 2
|
max-parallel: 2
|
||||||
matrix:
|
matrix:
|
||||||
arch:
|
include: ${{ fromJSON(inputs.matrix)['windows'] }}
|
||||||
- x86
|
|
||||||
- amd64
|
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'false'
|
USE_S3_CACHE: 'false'
|
||||||
|
|
8
.github/workflows/build-docs.yml
vendored
8
.github/workflows/build-docs.yml
vendored
|
@ -24,15 +24,15 @@ jobs:
|
||||||
build:
|
build:
|
||||||
name: Build
|
name: Build
|
||||||
runs-on:
|
runs-on:
|
||||||
- ubuntu-latest
|
- ubuntu-22.04
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
docs-output:
|
docs-output:
|
||||||
- linkcheck
|
# XXX re-enable lintcheck and spellcheck then fix the errors
|
||||||
- spellcheck
|
# - linkcheck
|
||||||
|
# - spellcheck
|
||||||
- html
|
- html
|
||||||
# - pdf
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
397
.github/workflows/build-packages.yml
vendored
397
.github/workflows/build-packages.yml
vendored
|
@ -36,6 +36,14 @@ on:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
description: Seed used to invalidate caches
|
description: Seed used to invalidate caches
|
||||||
|
matrix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
linux_arm_runner:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
|
||||||
env:
|
env:
|
||||||
COLUMNS: 190
|
COLUMNS: 190
|
||||||
|
@ -46,19 +54,199 @@ env:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
|
||||||
|
build-deb-packages:
|
||||||
|
name: DEB
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
|
||||||
|
runs-on:
|
||||||
|
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
|
|
||||||
|
container:
|
||||||
|
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# Checkout here so we can easily use custom actions
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
# We need a more recent rustc
|
||||||
|
- name: Install a more recent `rustc`
|
||||||
|
if: ${{ inputs.source == 'src' }}
|
||||||
|
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||||
|
|
||||||
|
- name: Set rust environment variables
|
||||||
|
if: ${{ inputs.source == 'src' }}
|
||||||
|
run: |
|
||||||
|
CARGO_HOME=${CARGO_HOME:-${HOME}/.cargo}
|
||||||
|
export CARGO_HOME
|
||||||
|
echo "CARGO_HOME=${CARGO_HOME}" | tee -a "${GITHUB_ENV}"
|
||||||
|
echo "${CARGO_HOME}/bin" | tee -a "${GITHUB_PATH}"
|
||||||
|
|
||||||
|
# Checkout here for the build process
|
||||||
|
- name: Checkout in build directory
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
path:
|
||||||
|
pkgs/checkout/
|
||||||
|
|
||||||
|
- name: Download Onedir Tarball as an Artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
|
||||||
|
path: pkgs/checkout/artifacts/
|
||||||
|
|
||||||
|
- name: Download Release Patch
|
||||||
|
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}.patch
|
||||||
|
path: pkgs/checkout/
|
||||||
|
|
||||||
|
- name: Setup Python Tools Scripts
|
||||||
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
|
with:
|
||||||
|
cwd: pkgs/checkout/
|
||||||
|
cache-prefix: ${{ inputs.cache-prefix }}
|
||||||
|
|
||||||
|
- name: Setup Salt Version
|
||||||
|
id: setup-salt-version
|
||||||
|
uses: ./.github/actions/setup-salt-version
|
||||||
|
with:
|
||||||
|
salt-version: "${{ inputs.salt-version }}"
|
||||||
|
cwd: pkgs/checkout/
|
||||||
|
|
||||||
|
- name: Configure Git
|
||||||
|
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
||||||
|
working-directory: pkgs/checkout/
|
||||||
|
run: |
|
||||||
|
tools pkg configure-git
|
||||||
|
|
||||||
|
- name: Apply release patch
|
||||||
|
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
||||||
|
working-directory: pkgs/checkout/
|
||||||
|
run: |
|
||||||
|
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
|
||||||
|
|
||||||
|
- name: Build Deb
|
||||||
|
working-directory: pkgs/checkout/
|
||||||
|
run: |
|
||||||
|
tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
|
||||||
|
inputs.source == 'onedir' &&
|
||||||
|
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
|
||||||
|
||
|
||||||
|
format('--arch={0}', matrix.arch)
|
||||||
|
}}
|
||||||
|
|
||||||
|
- name: Cleanup
|
||||||
|
run: |
|
||||||
|
rm -rf pkgs/checkout/
|
||||||
|
|
||||||
|
- name: Set Artifact Name
|
||||||
|
id: set-artifact-name
|
||||||
|
run: |
|
||||||
|
if [ "${{ inputs.source }}" != "src" ]; then
|
||||||
|
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
|
||||||
|
else
|
||||||
|
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload DEBs
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
|
||||||
|
path: ${{ github.workspace }}/pkgs/*
|
||||||
|
retention-days: 7
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
build-rpm-packages:
|
||||||
|
name: RPM
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
|
||||||
|
runs-on:
|
||||||
|
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
|
|
||||||
|
container:
|
||||||
|
image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download Onedir Tarball as an Artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
|
||||||
|
path: artifacts/
|
||||||
|
|
||||||
|
- name: Download Release Patch
|
||||||
|
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}.patch
|
||||||
|
|
||||||
|
- name: Setup Python Tools Scripts
|
||||||
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
|
with:
|
||||||
|
cache-prefix: ${{ inputs.cache-prefix }}
|
||||||
|
|
||||||
|
- name: Setup Salt Version
|
||||||
|
id: setup-salt-version
|
||||||
|
uses: ./.github/actions/setup-salt-version
|
||||||
|
with:
|
||||||
|
salt-version: "${{ inputs.salt-version }}"
|
||||||
|
|
||||||
|
- name: Configure Git
|
||||||
|
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
||||||
|
run: |
|
||||||
|
tools pkg configure-git
|
||||||
|
|
||||||
|
- name: Apply release patch
|
||||||
|
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
||||||
|
run: |
|
||||||
|
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
|
||||||
|
|
||||||
|
- name: Build RPM
|
||||||
|
run: |
|
||||||
|
tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
|
||||||
|
inputs.source == 'onedir' &&
|
||||||
|
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
|
||||||
|
||
|
||||||
|
format('--arch={0}', matrix.arch)
|
||||||
|
}}
|
||||||
|
|
||||||
|
- name: Set Artifact Name
|
||||||
|
id: set-artifact-name
|
||||||
|
run: |
|
||||||
|
if [ "${{ inputs.source }}" != "src" ]; then
|
||||||
|
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
|
||||||
|
else
|
||||||
|
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload RPMs
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
|
||||||
|
path: ~/rpmbuild/RPMS/${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}/*.rpm
|
||||||
|
retention-days: 7
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
build-macos-pkgs:
|
build-macos-pkgs:
|
||||||
name: macOS
|
name: macOS
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
|
||||||
environment: ${{ inputs.environment }}
|
environment: ${{ inputs.environment }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
|
include: ${{ fromJSON(inputs.matrix)['macos'] }}
|
||||||
source:
|
|
||||||
- ${{ inputs.source }}
|
|
||||||
env:
|
env:
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
runs-on:
|
runs-on:
|
||||||
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
|
- ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check Package Signing Enabled
|
- name: Check Package Signing Enabled
|
||||||
|
@ -162,212 +350,15 @@ jobs:
|
||||||
retention-days: 7
|
retention-days: 7
|
||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
|
|
||||||
build-deb-packages:
|
|
||||||
name: DEB
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- medium
|
|
||||||
- ${{ matrix.arch }}
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
arch:
|
|
||||||
- x86_64
|
|
||||||
- arm64
|
|
||||||
source:
|
|
||||||
- ${{ inputs.source }}
|
|
||||||
|
|
||||||
container:
|
|
||||||
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# Checkout here so we can easily use custom actions
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
# We need a more recent rustc
|
|
||||||
- name: Install a more recent `rustc`
|
|
||||||
if: ${{ inputs.source == 'src' }}
|
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
|
|
||||||
- name: Set rust environment variables
|
|
||||||
if: ${{ inputs.source == 'src' }}
|
|
||||||
run: |
|
|
||||||
CARGO_HOME=${CARGO_HOME:-${HOME}/.cargo}
|
|
||||||
export CARGO_HOME
|
|
||||||
echo "CARGO_HOME=${CARGO_HOME}" | tee -a "${GITHUB_ENV}"
|
|
||||||
echo "${CARGO_HOME}/bin" | tee -a "${GITHUB_PATH}"
|
|
||||||
|
|
||||||
# Checkout here for the build process
|
|
||||||
- name: Checkout in build directory
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
path:
|
|
||||||
pkgs/checkout/
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
|
|
||||||
path: pkgs/checkout/artifacts/
|
|
||||||
|
|
||||||
- name: Download Release Patch
|
|
||||||
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: salt-${{ inputs.salt-version }}.patch
|
|
||||||
path: pkgs/checkout/
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cwd: pkgs/checkout/
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Setup Salt Version
|
|
||||||
id: setup-salt-version
|
|
||||||
uses: ./.github/actions/setup-salt-version
|
|
||||||
with:
|
|
||||||
salt-version: "${{ inputs.salt-version }}"
|
|
||||||
cwd: pkgs/checkout/
|
|
||||||
|
|
||||||
- name: Configure Git
|
|
||||||
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
|
||||||
working-directory: pkgs/checkout/
|
|
||||||
run: |
|
|
||||||
tools pkg configure-git
|
|
||||||
|
|
||||||
- name: Apply release patch
|
|
||||||
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
|
||||||
working-directory: pkgs/checkout/
|
|
||||||
run: |
|
|
||||||
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
|
|
||||||
|
|
||||||
- name: Build Deb
|
|
||||||
working-directory: pkgs/checkout/
|
|
||||||
run: |
|
|
||||||
tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
|
|
||||||
inputs.source == 'onedir' &&
|
|
||||||
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
|
|
||||||
||
|
|
||||||
format('--arch={0}', matrix.arch)
|
|
||||||
}}
|
|
||||||
|
|
||||||
- name: Cleanup
|
|
||||||
run: |
|
|
||||||
rm -rf pkgs/checkout/
|
|
||||||
|
|
||||||
- name: Set Artifact Name
|
|
||||||
id: set-artifact-name
|
|
||||||
run: |
|
|
||||||
if [ "${{ inputs.source }}" != "src" ]; then
|
|
||||||
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Upload DEBs
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
|
|
||||||
path: ${{ github.workspace }}/pkgs/*
|
|
||||||
retention-days: 7
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
build-rpm-packages:
|
|
||||||
name: RPM
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- medium
|
|
||||||
- ${{ matrix.arch }}
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
arch:
|
|
||||||
- x86_64
|
|
||||||
- arm64
|
|
||||||
source:
|
|
||||||
- ${{ inputs.source }}
|
|
||||||
|
|
||||||
container:
|
|
||||||
image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Download Release Patch
|
|
||||||
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: salt-${{ inputs.salt-version }}.patch
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Setup Salt Version
|
|
||||||
id: setup-salt-version
|
|
||||||
uses: ./.github/actions/setup-salt-version
|
|
||||||
with:
|
|
||||||
salt-version: "${{ inputs.salt-version }}"
|
|
||||||
|
|
||||||
- name: Configure Git
|
|
||||||
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
|
||||||
run: |
|
|
||||||
tools pkg configure-git
|
|
||||||
|
|
||||||
- name: Apply release patch
|
|
||||||
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
|
|
||||||
run: |
|
|
||||||
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
|
|
||||||
|
|
||||||
- name: Build RPM
|
|
||||||
run: |
|
|
||||||
tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
|
|
||||||
inputs.source == 'onedir' &&
|
|
||||||
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
|
|
||||||
||
|
|
||||||
format('--arch={0}', matrix.arch)
|
|
||||||
}}
|
|
||||||
|
|
||||||
- name: Set Artifact Name
|
|
||||||
id: set-artifact-name
|
|
||||||
run: |
|
|
||||||
if [ "${{ inputs.source }}" != "src" ]; then
|
|
||||||
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
|
|
||||||
else
|
|
||||||
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Upload RPMs
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
|
|
||||||
path: ~/rpmbuild/RPMS/${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}/*.rpm
|
|
||||||
retention-days: 7
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
build-windows-pkgs:
|
build-windows-pkgs:
|
||||||
name: Windows
|
name: Windows
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
|
||||||
environment: ${{ inputs.environment }}
|
environment: ${{ inputs.environment }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 2
|
max-parallel: 2
|
||||||
matrix:
|
matrix:
|
||||||
arch:
|
include: ${{ fromJSON(inputs.matrix)['windows'] }}
|
||||||
- x86
|
|
||||||
- amd64
|
|
||||||
source:
|
|
||||||
- ${{ inputs.source }}
|
|
||||||
|
|
||||||
runs-on:
|
runs-on:
|
||||||
- windows-latest
|
- windows-latest
|
||||||
env:
|
env:
|
||||||
|
|
49
.github/workflows/build-salt-onedir.yml
vendored
49
.github/workflows/build-salt-onedir.yml
vendored
|
@ -8,12 +8,6 @@ on:
|
||||||
type: string
|
type: string
|
||||||
required: true
|
required: true
|
||||||
description: The Salt version to set prior to building packages.
|
description: The Salt version to set prior to building packages.
|
||||||
github-hosted-runners:
|
|
||||||
type: boolean
|
|
||||||
required: true
|
|
||||||
self-hosted-runners:
|
|
||||||
type: boolean
|
|
||||||
required: true
|
|
||||||
cache-seed:
|
cache-seed:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
|
@ -26,6 +20,14 @@ on:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
description: The version of python to use with relenv
|
description: The version of python to use with relenv
|
||||||
|
matrix:
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Json config for build matrix
|
||||||
|
linux_arm_runner:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
|
||||||
env:
|
env:
|
||||||
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
||||||
|
@ -39,21 +41,18 @@ env:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
|
||||||
|
|
||||||
build-salt-linux:
|
build-salt-linux:
|
||||||
name: Linux
|
name: Linux
|
||||||
if: ${{ inputs.self-hosted-runners }}
|
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'false'
|
||||||
|
runs-on:
|
||||||
|
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
arch:
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
- x86_64
|
|
||||||
- arm64
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- ${{ matrix.arch }}
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
- name: "Throttle Builds"
|
||||||
|
@ -63,6 +62,10 @@ jobs:
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.10'
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
- name: Setup Python Tools Scripts
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
with:
|
with:
|
||||||
|
@ -95,18 +98,22 @@ jobs:
|
||||||
|
|
||||||
build-salt-macos:
|
build-salt-macos:
|
||||||
name: macOS
|
name: macOS
|
||||||
if: ${{ inputs.github-hosted-runners }}
|
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 2
|
max-parallel: 2
|
||||||
matrix:
|
matrix:
|
||||||
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
|
include: ${{ fromJSON(inputs.matrix)['macos'] }}
|
||||||
runs-on:
|
runs-on:
|
||||||
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
|
- ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
|
||||||
env:
|
env:
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
- name: "Check cores"
|
||||||
|
shell: bash
|
||||||
|
run: sysctl -n hw.ncpu
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
- name: "Throttle Builds"
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
@ -150,14 +157,12 @@ jobs:
|
||||||
|
|
||||||
build-salt-windows:
|
build-salt-windows:
|
||||||
name: Windows
|
name: Windows
|
||||||
if: ${{ inputs.github-hosted-runners }}
|
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 2
|
max-parallel: 2
|
||||||
matrix:
|
matrix:
|
||||||
arch:
|
include: ${{ fromJSON(inputs.matrix)['windows'] }}
|
||||||
- x86
|
|
||||||
- amd64
|
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
env:
|
env:
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
|
|
1648
.github/workflows/ci.yml
vendored
1648
.github/workflows/ci.yml
vendored
File diff suppressed because it is too large
Load diff
132
.github/workflows/draft-release.yml
vendored
Normal file
132
.github/workflows/draft-release.yml
vendored
Normal file
|
@ -0,0 +1,132 @@
|
||||||
|
---
|
||||||
|
name: Draft Github Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
salt-version:
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: The Salt version to set prior to building packages.
|
||||||
|
matrix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
build-matrix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
|
||||||
|
env:
|
||||||
|
COLUMNS: 190
|
||||||
|
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
||||||
|
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
||||||
|
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
||||||
|
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
|
||||||
|
list-artifacts:
|
||||||
|
name: List Artifacts
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
steps:
|
||||||
|
# Checkout here so we can easily use custom actions
|
||||||
|
- uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
path: artifacts/
|
||||||
|
- name: List Directory Structure
|
||||||
|
run: ls -R artifacts/
|
||||||
|
|
||||||
|
create-github-release:
|
||||||
|
name: Draft Release v${{ inputs.salt-version }}
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
outputs:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
steps:
|
||||||
|
- name: Create Release
|
||||||
|
id: create_release
|
||||||
|
uses: actions/create-release@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
release_name: "Release v${{ inputs.salt-version }}"
|
||||||
|
tag_name: v${{ inputs.salt-version }}
|
||||||
|
draft: true
|
||||||
|
prerelease: false
|
||||||
|
- name: Release Output
|
||||||
|
run: echo "upload_url=${{ steps.create_release.outputs.upload_url }}" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
upload-source-tarball:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}.tar.gz
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
upload-onedir:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix) }}
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.${{ matrix.platform == 'windows' && 'zip' || 'tar.xz' }}
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
upload-deb-packages:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.build-matrix)['linux'] }}
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
||||||
|
pattern: "*.deb"
|
||||||
|
|
||||||
|
upload-rpm-packages:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.build-matrix)['linux'] }}
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
upload-mac-packages:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.build-matrix)['macos'] }}
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
upload-windows-msi-packages:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.build-matrix)['windows'] }}
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
upload-windows-nsis-packages:
|
||||||
|
needs:
|
||||||
|
- create-github-release
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.build-matrix)['windows'] }}
|
||||||
|
uses: ./.github/workflows/release-artifact.yml
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS
|
||||||
|
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
|
13
.github/workflows/lint-action.yml
vendored
13
.github/workflows/lint-action.yml
vendored
|
@ -18,17 +18,13 @@ env:
|
||||||
jobs:
|
jobs:
|
||||||
Salt:
|
Salt:
|
||||||
name: Lint Salt's Source Code
|
name: Lint Salt's Source Code
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}
|
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}
|
||||||
|
|
||||||
container:
|
container:
|
||||||
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
|
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Install System Deps
|
|
||||||
run: |
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
|
|
||||||
|
|
||||||
- name: Add Git Safe Directory
|
- name: Add Git Safe Directory
|
||||||
run: |
|
run: |
|
||||||
|
@ -62,18 +58,13 @@ jobs:
|
||||||
|
|
||||||
Tests:
|
Tests:
|
||||||
name: Lint Salt's Test Suite
|
name: Lint Salt's Test Suite
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }}
|
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }}
|
||||||
|
|
||||||
container:
|
container:
|
||||||
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
|
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Install System Deps
|
|
||||||
run: |
|
|
||||||
echo "deb http://deb.debian.org/debian bookworm-backports main" >> /etc/apt/sources.list
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
|
|
||||||
|
|
||||||
- name: Add Git Safe Directory
|
- name: Add Git Safe Directory
|
||||||
run: |
|
run: |
|
||||||
|
|
2404
.github/workflows/nightly.yml
vendored
2404
.github/workflows/nightly.yml
vendored
File diff suppressed because it is too large
Load diff
9
.github/workflows/pre-commit-action.yml
vendored
9
.github/workflows/pre-commit-action.yml
vendored
|
@ -21,21 +21,16 @@ jobs:
|
||||||
Pre-Commit:
|
Pre-Commit:
|
||||||
name: Run Pre-Commit Against Salt
|
name: Run Pre-Commit Against Salt
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
|
|
||||||
container:
|
container:
|
||||||
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
|
image: ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04
|
||||||
|
|
||||||
env:
|
env:
|
||||||
PRE_COMMIT_COLOR: always
|
PRE_COMMIT_COLOR: always
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Install System Deps
|
|
||||||
run: |
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y wget curl enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev rustc
|
|
||||||
|
|
||||||
- name: Add Git Safe Directory
|
- name: Add Git Safe Directory
|
||||||
run: |
|
run: |
|
||||||
git config --global --add safe.directory "$(pwd)"
|
git config --global --add safe.directory "$(pwd)"
|
||||||
|
|
69
.github/workflows/release-artifact.yml
vendored
Normal file
69
.github/workflows/release-artifact.yml
vendored
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
---
|
||||||
|
name: Upload Release Artifact
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: The Salt version to set prior to building packages.
|
||||||
|
upload_url:
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Release's upload url.
|
||||||
|
pattern:
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
description: Pattern of files to upload
|
||||||
|
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
|
||||||
|
list-files:
|
||||||
|
name: List ${{ inputs.name }}
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
outputs:
|
||||||
|
files: ${{ steps.list-files.outputs.files }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.name }}
|
||||||
|
path: artifacts
|
||||||
|
- run: find artifacts -maxdepth 1 -type f -printf '%f\n'
|
||||||
|
- id: list-files
|
||||||
|
run: |
|
||||||
|
if [ "${{ inputs.pattern }}" != "" ]; then
|
||||||
|
echo files="$(find artifacts -maxdepth 1 -type f -name '${{ inputs.pattern }}' -printf '%f\n' | jq -Rnc '[inputs | { file: "\(.)" }]')" >> "$GITHUB_OUTPUT"
|
||||||
|
else
|
||||||
|
echo files="$(find artifacts -maxdepth 1 -type f -printf '%f\n' | jq -Rnc '[inputs | { file: "\(.)" }]')" >> "$GITHUB_OUTPUT"
|
||||||
|
fi
|
||||||
|
|
||||||
|
upload-files:
|
||||||
|
name: Upload ${{ matrix.file }} from ${{ inputs.name }}
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs:
|
||||||
|
- list-files
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(needs.list-files.outputs.files) }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.name }}
|
||||||
|
path: artifacts
|
||||||
|
|
||||||
|
- name: Detect type of ${{ matrix.file }}
|
||||||
|
id: file-type
|
||||||
|
run: echo "file_type=$( file --mime-type artifacts/${{ matrix.file }} )" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Upload ${{ matrix.file }}
|
||||||
|
id: upload-release-asset-source
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
|
||||||
|
asset_path: artifacts/${{ matrix.file }}
|
||||||
|
asset_name: ${{ matrix.file }}
|
||||||
|
asset_content_type: ${{ steps.file-type.outputs.file_type }}
|
2
.github/workflows/release-tag.yml
vendored
2
.github/workflows/release-tag.yml
vendored
|
@ -32,7 +32,7 @@ jobs:
|
||||||
permissions:
|
permissions:
|
||||||
contents: write # for dev-drprasad/delete-tag-and-release to delete tags or releases
|
contents: write # for dev-drprasad/delete-tag-and-release to delete tags or releases
|
||||||
name: Generate Tag and Github Release
|
name: Generate Tag and Github Release
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
steps:
|
steps:
|
||||||
- uses: dev-drprasad/delete-tag-and-release@v0.2.0
|
- uses: dev-drprasad/delete-tag-and-release@v0.2.0
|
||||||
if: github.event.inputs.reTag == 'true'
|
if: github.event.inputs.reTag == 'true'
|
||||||
|
|
2
.github/workflows/release-update-winrepo.yml
vendored
2
.github/workflows/release-update-winrepo.yml
vendored
|
@ -19,7 +19,7 @@ permissions:
|
||||||
jobs:
|
jobs:
|
||||||
update-winrepo:
|
update-winrepo:
|
||||||
name: Update Winrepo
|
name: Update Winrepo
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Checkout Salt
|
- name: Checkout Salt
|
||||||
|
|
|
@ -31,7 +31,6 @@ jobs:
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- self-hosted
|
||||||
- linux
|
- linux
|
||||||
- repo-release
|
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Checkout Salt
|
- name: Checkout Salt
|
||||||
|
|
62
.github/workflows/release.yml
vendored
62
.github/workflows/release.yml
vendored
|
@ -21,7 +21,7 @@ on:
|
||||||
|
|
||||||
env:
|
env:
|
||||||
COLUMNS: 190
|
COLUMNS: 190
|
||||||
CACHE_SEED: SEED-2 # Bump the number to invalidate all caches
|
CACHE_SEED: SEED-1 # Bump the number to invalidate all caches
|
||||||
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
||||||
|
@ -37,7 +37,7 @@ jobs:
|
||||||
|
|
||||||
check-requirements:
|
check-requirements:
|
||||||
name: Check Requirements
|
name: Check Requirements
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
environment: release-check
|
environment: release-check
|
||||||
steps:
|
steps:
|
||||||
- name: Check For Admin Permission
|
- name: Check For Admin Permission
|
||||||
|
@ -49,11 +49,9 @@ jobs:
|
||||||
prepare-workflow:
|
prepare-workflow:
|
||||||
name: Prepare Workflow Run
|
name: Prepare Workflow Run
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-release
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'false'
|
||||||
environment: release
|
environment: release
|
||||||
needs:
|
needs:
|
||||||
- check-requirements
|
- check-requirements
|
||||||
|
@ -63,6 +61,7 @@ jobs:
|
||||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||||
releases: ${{ steps.get-salt-releases.outputs.releases }}
|
releases: ${{ steps.get-salt-releases.outputs.releases }}
|
||||||
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
|
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
|
||||||
|
config: ${{ steps.workflow-config.outputs.config }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
@ -121,12 +120,15 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
|
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Define workflow config
|
||||||
|
id: workflow-config
|
||||||
|
run: |
|
||||||
|
tools ci workflow-config${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||||
|
|
||||||
download-onedir-artifact:
|
download-onedir-artifact:
|
||||||
name: Download Staging Onedir Artifact
|
name: Download Staging Onedir Artifact
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-release
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
environment: release
|
environment: release
|
||||||
|
@ -186,13 +188,13 @@ jobs:
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
||||||
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
|
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
|
||||||
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
|
|
||||||
backup:
|
backup:
|
||||||
name: Backup
|
name: Backup
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-release
|
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
env:
|
env:
|
||||||
|
@ -223,9 +225,7 @@ jobs:
|
||||||
publish-repositories:
|
publish-repositories:
|
||||||
name: Publish Repositories
|
name: Publish Repositories
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-release
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
needs:
|
needs:
|
||||||
|
@ -255,40 +255,17 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
|
tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
|
||||||
|
|
||||||
pkg-download-tests:
|
|
||||||
name: Package Downloads
|
|
||||||
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
- publish-repositories
|
|
||||||
- build-ci-deps
|
|
||||||
- download-onedir-artifact
|
|
||||||
uses: ./.github/workflows/test-package-downloads-action.yml
|
|
||||||
with:
|
|
||||||
nox-session: ci-test-onedir
|
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
|
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
|
||||||
environment: release
|
|
||||||
nox-version: 2022.8.7
|
|
||||||
python-version: "3.10"
|
|
||||||
skip-code-coverage: true
|
|
||||||
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
release:
|
release:
|
||||||
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
|
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
|
||||||
if: ${{ always() && ! failure() && ! cancelled() }}
|
if: ${{ always() && ! failure() && ! cancelled() }}
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-release
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- backup
|
- backup
|
||||||
- publish-repositories
|
- publish-repositories
|
||||||
- pkg-download-tests
|
|
||||||
environment: release
|
environment: release
|
||||||
steps:
|
steps:
|
||||||
- name: Clone The Salt Repository
|
- name: Clone The Salt Repository
|
||||||
|
@ -395,9 +372,7 @@ jobs:
|
||||||
- release
|
- release
|
||||||
environment: release
|
environment: release
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-release
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
steps:
|
steps:
|
||||||
|
@ -449,12 +424,11 @@ jobs:
|
||||||
# on a pull request instead of requiring all
|
# on a pull request instead of requiring all
|
||||||
name: Set the ${{ github.workflow }} Pipeline Exit Status
|
name: Set the ${{ github.workflow }} Pipeline Exit Status
|
||||||
if: always()
|
if: always()
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
needs:
|
needs:
|
||||||
- check-requirements
|
- check-requirements
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- publish-repositories
|
- publish-repositories
|
||||||
- pkg-download-tests
|
|
||||||
- release
|
- release
|
||||||
- publish-pypi
|
- publish-pypi
|
||||||
- build-ci-deps
|
- build-ci-deps
|
||||||
|
|
1616
.github/workflows/scheduled.yml
vendored
1616
.github/workflows/scheduled.yml
vendored
File diff suppressed because it is too large
Load diff
2496
.github/workflows/staging.yml
vendored
2496
.github/workflows/staging.yml
vendored
File diff suppressed because it is too large
Load diff
|
@ -1,9 +1,10 @@
|
||||||
|
|
||||||
build-ci-deps:
|
build-ci-deps:
|
||||||
<%- do test_salt_needs.append("build-ci-deps") %>
|
<%- do test_salt_needs.append("build-ci-deps") %>
|
||||||
|
<%- do test_salt_linux_needs.append("build-ci-deps") %>
|
||||||
name: CI Deps
|
name: CI Deps
|
||||||
<%- if workflow_slug != 'release' %>
|
<%- if workflow_slug != 'release' %>
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] }}
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
|
@ -20,3 +21,5 @@
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
||||||
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
|
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
|
||||||
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
|
|
|
@ -23,12 +23,6 @@
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Download DEB Packages
|
- name: Download DEB Packages
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
|
|
@ -13,12 +13,6 @@
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Download macOS x86_64 Packages
|
- name: Download macOS x86_64 Packages
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
|
|
@ -13,12 +13,6 @@
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Download Linux x86_64 Onedir Archive
|
- name: Download Linux x86_64 Onedir Archive
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
|
|
@ -1,4 +1,9 @@
|
||||||
<%- for backend in ("onedir", "src") %>
|
<%- if gh_environment != "ci" -%>
|
||||||
|
<%- set pkg_types = ("onedir", "src") %>
|
||||||
|
<%- else -%>
|
||||||
|
<%- set pkg_types = ("onedir",) %>
|
||||||
|
<%- endif -%>
|
||||||
|
<%- for backend in pkg_types %>
|
||||||
<%- set job_name = "build-pkgs-{}".format(backend) %>
|
<%- set job_name = "build-pkgs-{}".format(backend) %>
|
||||||
<%- if backend == "src" %>
|
<%- if backend == "src" %>
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
|
@ -6,7 +11,7 @@
|
||||||
|
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
name: Build Packages
|
name: Build Packages
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] }}
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- build-salt-onedir
|
- build-salt-onedir
|
||||||
|
@ -17,11 +22,14 @@
|
||||||
relenv-version: "<{ relenv_version }>"
|
relenv-version: "<{ relenv_version }>"
|
||||||
python-version: "<{ python_version }>"
|
python-version: "<{ python_version }>"
|
||||||
source: "<{ backend }>"
|
source: "<{ backend }>"
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
|
||||||
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
<%- if gh_environment != "ci" %>
|
<%- if gh_environment != "ci" %>
|
||||||
environment: <{ gh_environment }>
|
environment: <{ gh_environment }>
|
||||||
sign-macos-packages: false
|
sign-macos-packages: false
|
||||||
sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>
|
sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
|
|
||||||
<%- endfor %>
|
<%- endfor %>
|
||||||
|
|
|
@ -1,35 +0,0 @@
|
||||||
<%- for type, display_name in (
|
|
||||||
("src", "Source"),
|
|
||||||
("deb", "DEB"),
|
|
||||||
("rpm", "RPM"),
|
|
||||||
("windows", "Windows"),
|
|
||||||
("macos", "macOS"),
|
|
||||||
("onedir", "Onedir"),
|
|
||||||
) %>
|
|
||||||
|
|
||||||
<%- set job_name = "build-{}-repo".format(type) %>
|
|
||||||
<%- do build_repo_needs.append(job_name) %>
|
|
||||||
|
|
||||||
<{ job_name }>:
|
|
||||||
name: Build Repository
|
|
||||||
environment: <{ gh_environment }>
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
|
||||||
USE_S3_CACHE: 'true'
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
<%- if type not in ("src", "onedir") %>
|
|
||||||
- build-pkgs-onedir
|
|
||||||
<%- elif type == 'onedir' %>
|
|
||||||
- build-salt-onedir
|
|
||||||
<%- elif type == 'src' %>
|
|
||||||
- build-source-tarball
|
|
||||||
- build-pkgs-src
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
<%- include "build-{}-repo.yml.jinja".format(type) %>
|
|
||||||
|
|
||||||
<%- endfor %>
|
|
|
@ -23,12 +23,6 @@
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Download RPM Packages
|
- name: Download RPM Packages
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
|
|
@ -13,12 +13,6 @@
|
||||||
with:
|
with:
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Download Source Tarball
|
- name: Download Source Tarball
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
|
44
.github/workflows/templates/ci.yml.jinja
vendored
44
.github/workflows/templates/ci.yml.jinja
vendored
|
@ -12,7 +12,6 @@
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: Pre-Commit
|
name: Pre-Commit
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
uses: ./.github/workflows/pre-commit-action.yml
|
uses: ./.github/workflows/pre-commit-action.yml
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
|
@ -30,7 +29,7 @@
|
||||||
lint:
|
lint:
|
||||||
<%- do conclusion_needs.append('lint') %>
|
<%- do conclusion_needs.append('lint') %>
|
||||||
name: Lint
|
name: Lint
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
|
||||||
uses: ./.github/workflows/lint-action.yml
|
uses: ./.github/workflows/lint-action.yml
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
|
@ -44,7 +43,6 @@
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: NSIS Tests
|
name: NSIS Tests
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
uses: ./.github/workflows/nsis-tests.yml
|
uses: ./.github/workflows/nsis-tests.yml
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
|
@ -58,31 +56,19 @@
|
||||||
|
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
|
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
<%- if prepare_actual_release %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- ubuntu-22.04
|
||||||
- linux
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
|
||||||
- medium
|
|
||||||
- x86_64
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
<%- if not prepare_actual_release %>
|
|
||||||
|
|
||||||
- name: Set up Python 3.10
|
- name: Set up Python 3.10
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.10"
|
||||||
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
- name: Setup Python Tools Scripts
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
with:
|
with:
|
||||||
|
@ -204,7 +190,7 @@
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: Documentation
|
name: Documentation
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- build-source-tarball
|
- build-source-tarball
|
||||||
|
@ -221,11 +207,11 @@
|
||||||
|
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
name: Build Source Tarball
|
name: Build Source Tarball
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- prepare-release
|
- prepare-release
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
@ -258,29 +244,28 @@
|
||||||
|
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: Build Dependencies Onedir
|
name: Build Onedir Dependencies
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
uses: ./.github/workflows/build-deps-onedir.yml
|
uses: ./.github/workflows/build-deps-onedir.yml
|
||||||
with:
|
with:
|
||||||
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
relenv-version: "<{ relenv_version }>"
|
relenv-version: "<{ relenv_version }>"
|
||||||
python-version: "<{ python_version }>"
|
python-version: "<{ python_version }>"
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
|
||||||
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
|
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
|
|
||||||
|
|
||||||
<%- set job_name = "build-salt-onedir" %>
|
<%- set job_name = "build-salt-onedir" %>
|
||||||
<%- if includes.get(job_name, True) %>
|
<%- if includes.get(job_name, True) %>
|
||||||
|
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: Build Salt Onedir
|
name: Build Salt Onedir
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- build-deps-onedir
|
- build-deps-onedir
|
||||||
|
@ -289,14 +274,13 @@
|
||||||
with:
|
with:
|
||||||
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
relenv-version: "<{ relenv_version }>"
|
relenv-version: "<{ relenv_version }>"
|
||||||
python-version: "<{ python_version }>"
|
python-version: "<{ python_version }>"
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
|
||||||
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
|
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
|
|
||||||
|
|
||||||
<%- set job_name = "build-pkgs" %>
|
<%- set job_name = "build-pkgs" %>
|
||||||
<%- if includes.get(job_name, True) %>
|
<%- if includes.get(job_name, True) %>
|
||||||
<%- include "build-packages.yml.jinja" %>
|
<%- include "build-packages.yml.jinja" %>
|
||||||
|
@ -323,7 +307,7 @@
|
||||||
<%- do conclusion_needs.append("combine-all-code-coverage") %>
|
<%- do conclusion_needs.append("combine-all-code-coverage") %>
|
||||||
name: Combine Code Coverage
|
name: Combine Code Coverage
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
needs:
|
needs:
|
||||||
|
|
37
.github/workflows/templates/layout.yml.jinja
vendored
37
.github/workflows/templates/layout.yml.jinja
vendored
|
@ -34,7 +34,7 @@ on:
|
||||||
|
|
||||||
env:
|
env:
|
||||||
COLUMNS: 190
|
COLUMNS: 190
|
||||||
CACHE_SEED: SEED-2 # Bump the number to invalidate all caches
|
CACHE_SEED: SEED-1 # Bump the number to invalidate all caches
|
||||||
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
RELENV_DATA: "${{ github.workspace }}/.relenv"
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
||||||
|
@ -77,7 +77,8 @@ jobs:
|
||||||
|
|
||||||
prepare-workflow:
|
prepare-workflow:
|
||||||
name: Prepare Workflow Run
|
name: Prepare Workflow Run
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
|
environment: ci
|
||||||
<%- if prepare_workflow_if_check %>
|
<%- if prepare_workflow_if_check %>
|
||||||
if: <{ prepare_workflow_if_check }>
|
if: <{ prepare_workflow_if_check }>
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
|
@ -89,10 +90,7 @@ jobs:
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
outputs:
|
outputs:
|
||||||
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
jobs: ${{ steps.define-jobs.outputs.jobs }}
|
||||||
runners: ${{ steps.runner-types.outputs.runners }}
|
|
||||||
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
|
||||||
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
|
|
||||||
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
|
|
||||||
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
testrun: ${{ steps.define-testrun.outputs.testrun }}
|
||||||
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
|
||||||
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
|
||||||
|
@ -101,6 +99,9 @@ jobs:
|
||||||
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
|
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
|
||||||
testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
|
testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
|
||||||
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
|
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
|
||||||
|
config: ${{ steps.workflow-config.outputs.config }}
|
||||||
|
env:
|
||||||
|
LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
@ -213,14 +214,6 @@ jobs:
|
||||||
salt-version: "<{ prepare_workflow_salt_version_input }>"
|
salt-version: "<{ prepare_workflow_salt_version_input }>"
|
||||||
validate-version: true
|
validate-version: true
|
||||||
|
|
||||||
- name: Get Pull Request Test Labels
|
|
||||||
id: get-pull-labels
|
|
||||||
if: ${{ github.event_name == 'pull_request'}}
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
run: |
|
|
||||||
tools ci get-pr-test-labels --repository ${{ github.repository }}
|
|
||||||
|
|
||||||
- name: Get Hash For Nox Tarball Cache
|
- name: Get Hash For Nox Tarball Cache
|
||||||
id: nox-archive-hash
|
id: nox-archive-hash
|
||||||
run: |
|
run: |
|
||||||
|
@ -259,11 +252,6 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
|
||||||
|
|
||||||
- name: Define Runner Types
|
|
||||||
id: runner-types
|
|
||||||
run: |
|
|
||||||
tools ci runner-types ${{ github.event_name }}
|
|
||||||
|
|
||||||
- name: Define Jobs To Run
|
- name: Define Jobs To Run
|
||||||
id: define-jobs
|
id: define-jobs
|
||||||
run: |
|
run: |
|
||||||
|
@ -290,6 +278,13 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
tools ci define-testrun ${{ github.event_name }} changed-files.json
|
||||||
|
|
||||||
|
- name: Define workflow config
|
||||||
|
id: workflow-config
|
||||||
|
run: |
|
||||||
|
tools ci workflow-config<{ prepare_workflow_skip_test_suite }><{
|
||||||
|
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
|
||||||
|
}> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||||
|
|
||||||
- name: Check Contents of generated testrun-changed-files.txt
|
- name: Check Contents of generated testrun-changed-files.txt
|
||||||
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
|
||||||
run: |
|
run: |
|
||||||
|
@ -338,7 +333,7 @@ jobs:
|
||||||
# on a pull request instead of requiring all
|
# on a pull request instead of requiring all
|
||||||
name: Set the ${{ github.workflow }} Pipeline Exit Status
|
name: Set the ${{ github.workflow }} Pipeline Exit Status
|
||||||
if: always()
|
if: always()
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
<%- if workflow_slug == "nightly" %>
|
<%- if workflow_slug == "nightly" %>
|
||||||
environment: <{ workflow_slug }>
|
environment: <{ workflow_slug }>
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
|
@ -358,6 +353,10 @@ jobs:
|
||||||
<%- for need in test_repo_needs.iter(consume=True) %>
|
<%- for need in test_repo_needs.iter(consume=True) %>
|
||||||
- <{ need }>
|
- <{ need }>
|
||||||
<%- endfor %>
|
<%- endfor %>
|
||||||
|
<%- if workflow_slug != "release" %>
|
||||||
|
- test-packages
|
||||||
|
- test
|
||||||
|
<%- endif %>
|
||||||
steps:
|
steps:
|
||||||
- name: Get workflow information
|
- name: Get workflow information
|
||||||
id: get-workflow-info
|
id: get-workflow-info
|
||||||
|
|
62
.github/workflows/templates/nightly.yml.jinja
vendored
62
.github/workflows/templates/nightly.yml.jinja
vendored
|
@ -56,67 +56,5 @@ concurrency:
|
||||||
<%- block jobs %>
|
<%- block jobs %>
|
||||||
<{- super() }>
|
<{- super() }>
|
||||||
|
|
||||||
<%- if includes.get("build-repos", True) %>
|
|
||||||
<%- include "build-repos.yml.jinja" %>
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
publish-repositories:
|
|
||||||
<%- do conclusion_needs.append('publish-repositories') %>
|
|
||||||
name: Publish Repositories
|
|
||||||
if: ${{ always() && ! failure() && ! cancelled() }}
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
environment: <{ gh_environment }>
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
- build-docs
|
|
||||||
<%- for need in build_repo_needs.iter(consume=True) %>
|
|
||||||
- <{ need }>
|
|
||||||
<%- endfor %>
|
|
||||||
<%- if workflow_slug == "nightly" %>
|
|
||||||
<%- for need in test_salt_needs.iter(consume=True) %>
|
|
||||||
- <{ need }>
|
|
||||||
<%- endfor %>
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
|
||||||
|
|
||||||
- name: Download Repository Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
pattern: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-repo-*
|
|
||||||
merge-multiple: true
|
|
||||||
path: repo/
|
|
||||||
|
|
||||||
- name: Decompress Repository Artifacts
|
|
||||||
run: |
|
|
||||||
find repo/ -type f -name '*.tar.gz' -print -exec tar xvf {} \;
|
|
||||||
find repo/ -type f -name '*.tar.gz' -print -exec rm -f {} \;
|
|
||||||
|
|
||||||
- name: Show Repository
|
|
||||||
run: |
|
|
||||||
tree -a artifacts/pkgs/repo/
|
|
||||||
|
|
||||||
- name: Upload Repository Contents (<{ gh_environment }>)
|
|
||||||
env:
|
|
||||||
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
|
|
||||||
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
|
|
||||||
run: |
|
|
||||||
tools pkg repo publish <{ gh_environment }> --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/
|
|
||||||
|
|
||||||
<%- endblock jobs %>
|
<%- endblock jobs %>
|
||||||
|
|
45
.github/workflows/templates/release.yml.jinja
vendored
45
.github/workflows/templates/release.yml.jinja
vendored
|
@ -52,7 +52,7 @@ permissions:
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do prepare_workflow_needs.append(job_name) %>
|
<%- do prepare_workflow_needs.append(job_name) %>
|
||||||
name: Check Requirements
|
name: Check Requirements
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
environment: <{ gh_environment }>-check
|
environment: <{ gh_environment }>-check
|
||||||
steps:
|
steps:
|
||||||
- name: Check For Admin Permission
|
- name: Check For Admin Permission
|
||||||
|
@ -71,11 +71,9 @@ permissions:
|
||||||
prepare-workflow:
|
prepare-workflow:
|
||||||
name: Prepare Workflow Run
|
name: Prepare Workflow Run
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'false'
|
||||||
environment: <{ gh_environment }>
|
environment: <{ gh_environment }>
|
||||||
<%- if prepare_workflow_needs %>
|
<%- if prepare_workflow_needs %>
|
||||||
needs:
|
needs:
|
||||||
|
@ -89,6 +87,7 @@ permissions:
|
||||||
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
|
||||||
releases: ${{ steps.get-salt-releases.outputs.releases }}
|
releases: ${{ steps.get-salt-releases.outputs.releases }}
|
||||||
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
|
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
|
||||||
|
config: ${{ steps.workflow-config.outputs.config }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
|
@ -147,6 +146,14 @@ permissions:
|
||||||
run: |
|
run: |
|
||||||
echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT"
|
echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Define workflow config
|
||||||
|
id: workflow-config
|
||||||
|
run: |
|
||||||
|
tools ci workflow-config<{ prepare_workflow_skip_test_suite }><{
|
||||||
|
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
|
||||||
|
}> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
|
||||||
|
|
||||||
|
|
||||||
<%- endblock prepare_workflow_job %>
|
<%- endblock prepare_workflow_job %>
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
|
|
||||||
|
@ -156,9 +163,7 @@ permissions:
|
||||||
download-onedir-artifact:
|
download-onedir-artifact:
|
||||||
name: Download Staging Onedir Artifact
|
name: Download Staging Onedir Artifact
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
environment: <{ gh_environment }>
|
environment: <{ gh_environment }>
|
||||||
|
@ -211,9 +216,7 @@ permissions:
|
||||||
backup:
|
backup:
|
||||||
name: Backup
|
name: Backup
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
env:
|
env:
|
||||||
|
@ -245,9 +248,7 @@ permissions:
|
||||||
<%- do conclusion_needs.append('publish-repositories') %>
|
<%- do conclusion_needs.append('publish-repositories') %>
|
||||||
name: Publish Repositories
|
name: Publish Repositories
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
needs:
|
needs:
|
||||||
|
@ -277,18 +278,12 @@ permissions:
|
||||||
run: |
|
run: |
|
||||||
tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
|
tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
|
||||||
|
|
||||||
<%- if includes.get("test-pkg-downloads", True) %>
|
|
||||||
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
release:
|
release:
|
||||||
<%- do conclusion_needs.append('release') %>
|
<%- do conclusion_needs.append('release') %>
|
||||||
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
|
name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
|
||||||
if: ${{ always() && ! failure() && ! cancelled() }}
|
if: ${{ always() && ! failure() && ! cancelled() }}
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
needs:
|
needs:
|
||||||
|
@ -402,9 +397,7 @@ permissions:
|
||||||
name: Restore Release Bucket From Backup
|
name: Restore Release Bucket From Backup
|
||||||
if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }}
|
if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }}
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
needs:
|
needs:
|
||||||
|
@ -445,9 +438,7 @@ permissions:
|
||||||
- restore #}
|
- restore #}
|
||||||
environment: <{ gh_environment }>
|
environment: <{ gh_environment }>
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- linux-x86_64
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
steps:
|
steps:
|
||||||
|
|
82
.github/workflows/templates/staging.yml.jinja
vendored
82
.github/workflows/templates/staging.yml.jinja
vendored
|
@ -51,9 +51,9 @@ on:
|
||||||
|
|
||||||
<%- block concurrency %>
|
<%- block concurrency %>
|
||||||
|
|
||||||
concurrency:
|
#concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
|
# group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
|
||||||
cancel-in-progress: false
|
# cancel-in-progress: false
|
||||||
|
|
||||||
<%- endblock concurrency %>
|
<%- endblock concurrency %>
|
||||||
|
|
||||||
|
@ -65,7 +65,7 @@ concurrency:
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do prepare_workflow_needs.append(job_name) %>
|
<%- do prepare_workflow_needs.append(job_name) %>
|
||||||
name: Check Requirements
|
name: Check Requirements
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
environment: <{ gh_environment }>-check
|
environment: <{ gh_environment }>-check
|
||||||
steps:
|
steps:
|
||||||
- name: Check For Admin Permission
|
- name: Check For Admin Permission
|
||||||
|
@ -86,21 +86,12 @@ concurrency:
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- build-docs
|
- build-docs
|
||||||
- build-src-repo
|
|
||||||
environment: <{ gh_environment }>
|
environment: <{ gh_environment }>
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- ubuntu-22.04
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
- name: Setup Python Tools Scripts
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
with:
|
with:
|
||||||
|
@ -112,12 +103,6 @@ concurrency:
|
||||||
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
|
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
|
||||||
path: artifacts/release
|
path: artifacts/release
|
||||||
|
|
||||||
- name: Download Source Repository
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo
|
|
||||||
path: artifacts/release
|
|
||||||
|
|
||||||
- name: Download Release Documentation (HTML)
|
- name: Download Release Documentation (HTML)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
@ -128,34 +113,6 @@ concurrency:
|
||||||
run: |
|
run: |
|
||||||
tree -a artifacts/release
|
tree -a artifacts/release
|
||||||
|
|
||||||
{#-
|
|
||||||
|
|
||||||
- name: Download Release Documentation (PDF)
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.pdf
|
|
||||||
path: artifacts/release
|
|
||||||
|
|
||||||
#}
|
|
||||||
|
|
||||||
- name: Upload Release Artifacts
|
|
||||||
run: |
|
|
||||||
tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
|
|
||||||
|
|
||||||
- name: Upload PyPi Artifacts
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: pypi-artifacts
|
|
||||||
path: |
|
|
||||||
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
|
|
||||||
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
|
|
||||||
retention-days: 7
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
<%- if includes.get("test-pkg-downloads", True) %>
|
|
||||||
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
publish-pypi:
|
publish-pypi:
|
||||||
<%- do conclusion_needs.append('publish-pypi') %>
|
<%- do conclusion_needs.append('publish-pypi') %>
|
||||||
name: Publish to PyPi(test)
|
name: Publish to PyPi(test)
|
||||||
|
@ -174,9 +131,7 @@ concurrency:
|
||||||
<%- endfor %>
|
<%- endfor %>
|
||||||
environment: <{ gh_environment }>
|
environment: <{ gh_environment }>
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- ubuntu-22.04
|
||||||
- linux
|
|
||||||
- repo-<{ gh_environment }>
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
@ -221,4 +176,29 @@ concurrency:
|
||||||
run: |
|
run: |
|
||||||
tools pkg pypi-upload --test artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
|
tools pkg pypi-upload --test artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
|
||||||
|
|
||||||
|
draft-release:
|
||||||
|
name: Draft Github Release
|
||||||
|
if: |
|
||||||
|
always() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
|
||||||
|
(needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') &&
|
||||||
|
needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' &&
|
||||||
|
needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success'
|
||||||
|
needs:
|
||||||
|
- prepare-workflow
|
||||||
|
- pre-commit
|
||||||
|
- build-salt-onedir
|
||||||
|
- build-pkgs-onedir
|
||||||
|
- test-packages
|
||||||
|
- test
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: read
|
||||||
|
id-token: write
|
||||||
|
uses: ./.github/workflows/draft-release.yml
|
||||||
|
with:
|
||||||
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['artifact-matrix']) }}
|
||||||
|
build-matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
|
||||||
|
|
||||||
|
|
||||||
<%- endblock jobs %>
|
<%- endblock jobs %>
|
||||||
|
|
|
@ -6,13 +6,12 @@
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: Package Downloads
|
name: Package Downloads
|
||||||
<%- if gh_environment == "staging" %>
|
<%- if gh_environment == "staging" %>
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] }}
|
||||||
<%- else %>
|
<%- else %>
|
||||||
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
|
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
|
||||||
<%- endif %>
|
<%- endif %>
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- publish-repositories
|
|
||||||
- build-ci-deps
|
- build-ci-deps
|
||||||
<%- if gh_environment == "release" %>
|
<%- if gh_environment == "release" %>
|
||||||
- download-onedir-artifact
|
- download-onedir-artifact
|
||||||
|
|
|
@ -1,99 +1,19 @@
|
||||||
<%- for os in test_salt_pkg_listing["linux"] %>
|
<%- set job_name = "test-packages" %>
|
||||||
<%- set job_name = "{}-pkg-tests{}".format(os.slug.replace(".", ""), os.fips and '-fips' or '') %>
|
|
||||||
|
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do test_salt_pkg_needs.append(job_name) %>
|
name: Test Package
|
||||||
name: <{ os.display_name }> Package Test<%- if os.fips %> (fips)<%- endif %>
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
|
||||||
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- build-pkgs-onedir
|
- build-pkgs-onedir
|
||||||
- build-ci-deps
|
- build-ci-deps
|
||||||
uses: ./.github/workflows/test-packages-action-linux.yml
|
uses: ./.github/workflows/test-packages-action.yml
|
||||||
with:
|
with:
|
||||||
distro-slug: <{ os.slug }>
|
|
||||||
nox-session: ci-test-onedir
|
nox-session: ci-test-onedir
|
||||||
platform: linux
|
|
||||||
arch: <{ os.arch }>
|
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
pkg-type: <{ os.pkg_type }>
|
|
||||||
nox-version: <{ nox_version }>
|
nox-version: <{ nox_version }>
|
||||||
python-version: "<{ gh_actions_workflows_python_version }>"
|
python-version: "<{ gh_actions_workflows_python_version }>"
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
||||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
skip-code-coverage: <{ skip_test_coverage_check }>
|
||||||
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
|
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
|
||||||
<%- if os.fips %>
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
|
||||||
fips: true
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
<%- endfor %>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<%- for os in test_salt_pkg_listing["macos"] %>
|
|
||||||
<%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
|
|
||||||
|
|
||||||
<{ job_name }>:
|
|
||||||
<%- do test_salt_pkg_needs.append(job_name) %>
|
|
||||||
name: <{ os.display_name }> Package Test
|
|
||||||
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
- build-pkgs-onedir
|
|
||||||
- build-ci-deps
|
|
||||||
uses: ./.github/workflows/test-packages-action-macos.yml
|
|
||||||
with:
|
|
||||||
distro-slug: <{ os.slug }>
|
|
||||||
runner: <{ os.runner }>
|
|
||||||
nox-session: ci-test-onedir
|
|
||||||
platform: macos
|
|
||||||
arch: <{ os.arch }>
|
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
|
||||||
pkg-type: macos
|
|
||||||
nox-version: <{ nox_version }>
|
|
||||||
python-version: "<{ gh_actions_workflows_python_version }>"
|
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
|
||||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
|
||||||
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
|
|
||||||
|
|
||||||
<%- endfor %>
|
|
||||||
|
|
||||||
|
|
||||||
<%- for os in test_salt_pkg_listing["windows"] %>
|
|
||||||
<%- set job_name = "{}-{}-pkg-tests".format(os.slug.replace(".", ""), os.pkg_type.lower()) %>
|
|
||||||
|
|
||||||
<{ job_name }>:
|
|
||||||
<%- do test_salt_pkg_needs.append(job_name) %>
|
|
||||||
name: <{ os.display_name }> <{ os.pkg_type }> Package Test
|
|
||||||
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
- build-pkgs-onedir
|
|
||||||
- build-ci-deps
|
|
||||||
uses: ./.github/workflows/test-packages-action-windows.yml
|
|
||||||
with:
|
|
||||||
distro-slug: <{ os.slug }>
|
|
||||||
nox-session: ci-test-onedir
|
|
||||||
platform: windows
|
|
||||||
arch: <{ os.arch }>
|
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
|
||||||
pkg-type: <{ os.pkg_type }>
|
|
||||||
nox-version: <{ nox_version }>
|
|
||||||
python-version: "<{ gh_actions_workflows_python_version }>"
|
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
|
||||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
|
||||||
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
|
|
||||||
|
|
||||||
<%- endfor %>
|
|
||||||
|
|
105
.github/workflows/templates/test-salt.yml.jinja
vendored
105
.github/workflows/templates/test-salt.yml.jinja
vendored
|
@ -1,103 +1,24 @@
|
||||||
{#-
|
<%- if workflow_slug in ("nightly", "scheduled") %>
|
||||||
Full test runs. Each chunk should never take more than 2 hours. We allow 3, and on windows we add 30 more minutes.
|
<%- set timeout_value = 360 %>
|
||||||
Partial test runs(no chunk parallelization), 6 Hours
|
<%- else %>
|
||||||
#}
|
<%- set timeout_value = 180 %>
|
||||||
<%- set full_testrun_timeout_value = 180 %>
|
<%- endif %>
|
||||||
<%- set partial_testrun_timeout_value = 360 %>
|
test:
|
||||||
<%- set windows_full_testrun_timeout_value = full_testrun_timeout_value + 30 %>
|
name: Test Salt
|
||||||
|
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
|
||||||
<%- for os in test_salt_listing["windows"] %>
|
|
||||||
|
|
||||||
<{ os.slug.replace(".", "") }>:
|
|
||||||
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
|
|
||||||
name: <{ os.display_name }> Test
|
|
||||||
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
needs:
|
||||||
- prepare-workflow
|
- prepare-workflow
|
||||||
- build-ci-deps
|
- build-ci-deps
|
||||||
uses: ./.github/workflows/test-action-windows.yml
|
uses: ./.github/workflows/test-action.yml
|
||||||
with:
|
with:
|
||||||
distro-slug: <{ os.slug }>
|
|
||||||
nox-session: ci-test-onedir
|
nox-session: ci-test-onedir
|
||||||
platform: windows
|
|
||||||
arch: amd64
|
|
||||||
nox-version: <{ nox_version }>
|
nox-version: <{ nox_version }>
|
||||||
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
|
python-version: "<{ gh_actions_workflows_python_version }>"
|
||||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
||||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
skip-code-coverage: <{ skip_test_coverage_check }>
|
||||||
workflow-slug: <{ workflow_slug }>
|
workflow-slug: <{ workflow_slug }>
|
||||||
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ windows_full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
|
default-timeout: <{ timeout_value }>
|
||||||
|
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
|
||||||
<%- endfor %>
|
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
|
||||||
|
|
||||||
|
|
||||||
<%- for os in test_salt_listing["macos"] %>
|
|
||||||
|
|
||||||
<{ os.slug.replace(".", "") }>:
|
|
||||||
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
|
|
||||||
name: <{ os.display_name }> Test
|
|
||||||
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
- build-ci-deps
|
|
||||||
uses: ./.github/workflows/test-action-macos.yml
|
|
||||||
with:
|
|
||||||
distro-slug: <{ os.slug }>
|
|
||||||
runner: <{ os.runner }>
|
|
||||||
nox-session: ci-test-onedir
|
|
||||||
platform: macos
|
|
||||||
arch: <{ os.arch }>
|
|
||||||
nox-version: <{ nox_version }>
|
|
||||||
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
|
|
||||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
|
||||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
|
||||||
workflow-slug: <{ workflow_slug }>
|
|
||||||
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
|
|
||||||
|
|
||||||
<%- endfor %>
|
|
||||||
|
|
||||||
<%- for os in test_salt_listing["linux"] %>
|
|
||||||
<%- set job_name = "{}{}".format(os.slug.replace(".", ""), os.fips and '-fips' or '') %>
|
|
||||||
|
|
||||||
<{ job_name }>:
|
|
||||||
<%- do test_salt_needs.append(job_name) %>
|
|
||||||
name: <{ os.display_name }> Test<%- if os.fips %> (fips)<%- endif %>
|
|
||||||
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
|
|
||||||
<%- else %>
|
|
||||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
|
|
||||||
<%- endif %>
|
|
||||||
needs:
|
|
||||||
- prepare-workflow
|
|
||||||
- build-ci-deps
|
|
||||||
uses: ./.github/workflows/test-action-linux.yml
|
|
||||||
with:
|
|
||||||
distro-slug: <{ os.slug }>
|
|
||||||
nox-session: ci-test-onedir
|
|
||||||
platform: linux
|
|
||||||
arch: <{ os.arch }>
|
|
||||||
nox-version: <{ nox_version }>
|
|
||||||
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
|
|
||||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
|
||||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
|
||||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
|
|
||||||
skip-code-coverage: <{ skip_test_coverage_check }>
|
|
||||||
workflow-slug: <{ workflow_slug }>
|
|
||||||
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
|
|
||||||
<%- if os.fips %>
|
|
||||||
fips: true
|
|
||||||
<%- endif %>
|
|
||||||
|
|
||||||
<%- endfor %>
|
|
||||||
|
|
|
@ -6,7 +6,7 @@
|
||||||
<%- do conclusion_needs.append(job_name) %>
|
<%- do conclusion_needs.append(job_name) %>
|
||||||
name: Trigger Branch Workflows
|
name: Trigger Branch Workflows
|
||||||
if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
|
if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
needs:
|
needs:
|
||||||
- workflow-requirements
|
- workflow-requirements
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
<{ job_name }>:
|
<{ job_name }>:
|
||||||
<%- do prepare_workflow_needs.append(job_name) %>
|
<%- do prepare_workflow_needs.append(job_name) %>
|
||||||
name: Check Workflow Requirements
|
name: Check Workflow Requirements
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-22.04
|
||||||
outputs:
|
outputs:
|
||||||
requirements-met: ${{ steps.check-requirements.outputs.requirements-met }}
|
requirements-met: ${{ steps.check-requirements.outputs.requirements-met }}
|
||||||
steps:
|
steps:
|
||||||
|
|
414
.github/workflows/test-action-linux.yml
vendored
414
.github/workflows/test-action-linux.yml
vendored
|
@ -1,414 +0,0 @@
|
||||||
---
|
|
||||||
name: Test Artifact
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
distro-slug:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The OS slug to run tests against
|
|
||||||
nox-session:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox session to run
|
|
||||||
testrun:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: JSON string containing information about what and how to run the test suite
|
|
||||||
salt-version:
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
description: The Salt version to set prior to running tests.
|
|
||||||
cache-prefix:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: Seed used to invalidate caches
|
|
||||||
platform:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform being tested
|
|
||||||
arch:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
nox-version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox version to install
|
|
||||||
timeout-minutes:
|
|
||||||
required: true
|
|
||||||
type: number
|
|
||||||
description: Timeout, in minutes, for the test job
|
|
||||||
gh-actions-python-version:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The python version to run tests with
|
|
||||||
default: "3.10"
|
|
||||||
fips:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
description: Test run with FIPS enabled
|
|
||||||
package-name:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The onedir package name to use
|
|
||||||
default: salt
|
|
||||||
skip-code-coverage:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
description: Skip code coverage
|
|
||||||
default: false
|
|
||||||
workflow-slug:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: Which workflow is running.
|
|
||||||
default: ci
|
|
||||||
|
|
||||||
env:
|
|
||||||
COLUMNS: 190
|
|
||||||
AWS_MAX_ATTEMPTS: "10"
|
|
||||||
AWS_RETRY_MODE: "adaptive"
|
|
||||||
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
|
||||||
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
|
||||||
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Test Matrix
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
|
|
||||||
build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
- name: Generate Test Matrix
|
|
||||||
id: generate-matrix
|
|
||||||
run: |
|
|
||||||
tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
timeout-minutes: ${{ inputs.timeout-minutes }}
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
|
|
||||||
env:
|
|
||||||
SALT_TRANSPORT: ${{ matrix.transport }}
|
|
||||||
TEST_GROUP: ${{ matrix.test-group || 1 }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: "Set `TIMESTAMP` environment variable"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Salt Version
|
|
||||||
run: |
|
|
||||||
echo "${{ inputs.salt-version }}" > salt/_version.txt
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Decompress Onedir Tarball
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
|
||||||
cd artifacts
|
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
|
|
||||||
- name: Download nox.linux.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: nox-linux-${{ inputs.arch }}-${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: PyPi Proxy
|
|
||||||
run: |
|
|
||||||
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Download testrun-changed-files.txt
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-changed-files.txt
|
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Start VM
|
|
||||||
id: spin-up-vm
|
|
||||||
env:
|
|
||||||
TESTS_CHUNK: ${{ matrix.tests-chunk }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: List Free Space
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
|
|
||||||
|
|
||||||
- name: Upload Checkout To VM
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm rsync ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Decompress .nox Directory
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Show System Info
|
|
||||||
run: |
|
|
||||||
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
|
|
||||||
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Changed Tests
|
|
||||||
id: run-fast-changed-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --core-tests --slow-tests --suppress-no-test-exit-code \
|
|
||||||
--from-filenames=testrun-changed-files.txt
|
|
||||||
|
|
||||||
- name: Run Fast Tests
|
|
||||||
id: run-fast-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
|
|
||||||
${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Slow Tests
|
|
||||||
id: run-slow-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
|
|
||||||
|
|
||||||
- name: Run Core Tests
|
|
||||||
id: run-core-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
|
|
||||||
|
|
||||||
- name: Run Flaky Tests
|
|
||||||
id: run-flaky-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
|
|
||||||
|
|
||||||
- name: Run Full Tests
|
|
||||||
id: run-full-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
|
|
||||||
-E TEST_GROUP ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
|
|
||||||
--test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
|
|
||||||
|
|
||||||
- name: Combine Coverage Reports
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-artifacts-from-vm
|
|
||||||
if: always() && steps.spin-up-vm.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
|
|
||||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
|
||||||
# from it showing in the tree command below
|
|
||||||
rm -rf artifacts/salt*
|
|
||||||
tree -a artifacts
|
|
||||||
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
|
|
||||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}${{ inputs.fips && '.fips' || '' }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Destroy VM
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
|
|
||||||
|
|
||||||
- name: Upload Code Coverage Test Run Artifacts
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/coverage/
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
- name: Upload JUnit XML Test Run Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/xml-unittests-output/
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
- name: Upload Test Run Log Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/logs
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
report:
|
|
||||||
name: Test Reports
|
|
||||||
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- test
|
|
||||||
- generate-matrix
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Merge JUnit XML Test Run Artifacts
|
|
||||||
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Merge Log Test Run Artifacts
|
|
||||||
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Merge Code Coverage Test Run Artifacts
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Download Code Coverage Test Run Artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
id: download-coverage-artifacts
|
|
||||||
with:
|
|
||||||
path: artifacts/coverage/
|
|
||||||
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
- name: Show Downloaded Test Run Artifacts
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
run: |
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Install Nox
|
|
||||||
run: |
|
|
||||||
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
|
||||||
|
|
||||||
- name: Create XML Coverage Reports
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
|
|
||||||
run: |
|
|
||||||
nox --force-color -e create-xml-coverage-reports
|
|
||||||
mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}${{ inputs.fips && '..fips' || '' }}..${{ inputs.nox-session }}.xml
|
|
||||||
mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}${{ inputs.fips && '..fips' || '' }}..${{ inputs.nox-session }}.xml
|
|
||||||
|
|
||||||
- name: Report Salt Code Coverage
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
nox --force-color -e report-coverage -- salt
|
|
||||||
|
|
||||||
- name: Report Combined Code Coverage
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
nox --force-color -e report-coverage
|
|
||||||
|
|
||||||
- name: Rename Code Coverage DB
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}${{ inputs.fips && '.fips' || '' }}.${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Upload Code Coverage DB
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
|
|
||||||
path: artifacts/coverage
|
|
||||||
include-hidden-files: true
|
|
449
.github/workflows/test-action-macos.yml
vendored
449
.github/workflows/test-action-macos.yml
vendored
|
@ -1,449 +0,0 @@
|
||||||
---
|
|
||||||
name: Test Artifact(macOS)
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
distro-slug:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The OS slug to run tests against
|
|
||||||
runner:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The GitHub runner name
|
|
||||||
nox-session:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox session to run
|
|
||||||
testrun:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: JSON string containing information about what and how to run the test suite
|
|
||||||
gh-actions-python-version:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The python version to run tests with
|
|
||||||
default: "3.11"
|
|
||||||
salt-version:
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
description: The Salt version to set prior to running tests.
|
|
||||||
cache-prefix:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: Seed used to invalidate caches
|
|
||||||
platform:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform being tested
|
|
||||||
arch:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
nox-version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox version to install
|
|
||||||
timeout-minutes:
|
|
||||||
required: true
|
|
||||||
type: number
|
|
||||||
description: Timeout, in minutes, for the test job
|
|
||||||
package-name:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The onedir package name to use
|
|
||||||
default: salt
|
|
||||||
skip-code-coverage:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
description: Skip code coverage
|
|
||||||
default: false
|
|
||||||
workflow-slug:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: Which workflow is running.
|
|
||||||
default: ci
|
|
||||||
|
|
||||||
env:
|
|
||||||
COLUMNS: 190
|
|
||||||
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
|
||||||
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
|
||||||
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Test Matrix
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
|
|
||||||
build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
- name: Generate Test Matrix
|
|
||||||
id: generate-matrix
|
|
||||||
run: |
|
|
||||||
tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test
|
|
||||||
runs-on: ${{ inputs.runner }}
|
|
||||||
timeout-minutes: ${{ inputs.timeout-minutes }}
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
|
|
||||||
env:
|
|
||||||
SALT_TRANSPORT: ${{ matrix.transport }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: "Set `TIMESTAMP` environment variable"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Salt Version
|
|
||||||
run: |
|
|
||||||
echo "${{ inputs.salt-version }}" > salt/_version.txt
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Decompress Onedir Tarball
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
|
||||||
cd artifacts
|
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
|
|
||||||
- name: Install System Dependencies
|
|
||||||
run: |
|
|
||||||
brew install tree
|
|
||||||
|
|
||||||
- name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: nox-macos-${{ inputs.arch }}-${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Set up Python ${{ inputs.gh-actions-python-version }}
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "${{ inputs.gh-actions-python-version }}"
|
|
||||||
|
|
||||||
- name: Install Nox
|
|
||||||
run: |
|
|
||||||
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
- name: Decompress .nox Directory
|
|
||||||
run: |
|
|
||||||
nox --force-color -e decompress-dependencies -- macos ${{ inputs.arch }}
|
|
||||||
|
|
||||||
- name: Download testrun-changed-files.txt
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-changed-files.txt
|
|
||||||
|
|
||||||
- name: Show System Info
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_SYSTEM_INFO_ONLY: "1"
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Changed Tests
|
|
||||||
id: run-fast-changed-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
|
|
||||||
-k "mac or darwin" --core-tests --slow-tests --suppress-no-test-exit-code \
|
|
||||||
--from-filenames=testrun-changed-files.txt
|
|
||||||
|
|
||||||
- name: Run Fast Tests
|
|
||||||
id: run-fast-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
|
|
||||||
-k "mac or darwin" --suppress-no-test-exit-code
|
|
||||||
|
|
||||||
- name: Run Slow Tests
|
|
||||||
id: run-slow-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
|
|
||||||
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests
|
|
||||||
|
|
||||||
- name: Run Core Tests
|
|
||||||
id: run-core-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
|
|
||||||
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests
|
|
||||||
|
|
||||||
- name: Run Flaky Tests
|
|
||||||
id: run-flaky-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
|
|
||||||
-k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
|
|
||||||
|
|
||||||
- name: Run Full Tests
|
|
||||||
id: run-full-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
|
|
||||||
--slow-tests --core-tests -k "mac or darwin"
|
|
||||||
|
|
||||||
- name: Fix file ownership
|
|
||||||
run: |
|
|
||||||
sudo chown -R "$(id -un)" .
|
|
||||||
|
|
||||||
- name: Combine Coverage Reports
|
|
||||||
if: always() && inputs.skip-code-coverage == false
|
|
||||||
run: |
|
|
||||||
nox --force-color -e combine-coverage
|
|
||||||
|
|
||||||
- name: Prepare Test Run Artifacts
|
|
||||||
id: download-artifacts-from-vm
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
|
||||||
# from it showing in the tree command below
|
|
||||||
rm -rf artifacts/salt*
|
|
||||||
tree -a artifacts
|
|
||||||
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
|
|
||||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Upload Code Coverage Test Run Artifacts
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/coverage/
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
- name: Upload JUnit XML Test Run Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/xml-unittests-output/
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
- name: Upload Test Run Log Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/logs
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
report:
|
|
||||||
name: Test Reports
|
|
||||||
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- test
|
|
||||||
- generate-matrix
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Merge JUnit XML Test Run Artifacts
|
|
||||||
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Merge Log Test Run Artifacts
|
|
||||||
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Merge Code Coverage Test Run Artifacts
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Download Code Coverage Test Run Artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
id: download-coverage-artifacts
|
|
||||||
with:
|
|
||||||
path: artifacts/coverage/
|
|
||||||
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
- name: Show Downloaded Test Run Artifacts
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
run: |
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Set up Python ${{ inputs.gh-actions-python-version }}
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "${{ inputs.gh-actions-python-version }}"
|
|
||||||
|
|
||||||
- name: Install Nox
|
|
||||||
run: |
|
|
||||||
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
|
||||||
|
|
||||||
- name: Create XML Coverage Reports
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
|
|
||||||
run: |
|
|
||||||
nox --force-color -e create-xml-coverage-reports
|
|
||||||
mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
|
|
||||||
mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml
|
|
||||||
|
|
||||||
- name: Report Salt Code Coverage
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
nox --force-color -e report-coverage -- salt
|
|
||||||
|
|
||||||
- name: Report Combined Code Coverage
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
nox --force-color -e report-coverage
|
|
||||||
|
|
||||||
- name: Rename Code Coverage DB
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Upload Code Coverage DB
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
|
|
||||||
path: artifacts/coverage
|
|
||||||
include-hidden-files: true
|
|
415
.github/workflows/test-action-windows.yml
vendored
415
.github/workflows/test-action-windows.yml
vendored
|
@ -1,415 +0,0 @@
|
||||||
---
|
|
||||||
name: Test Artifact
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
distro-slug:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The OS slug to run tests against
|
|
||||||
nox-session:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox session to run
|
|
||||||
testrun:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: JSON string containing information about what and how to run the test suite
|
|
||||||
salt-version:
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
description: The Salt version to set prior to running tests.
|
|
||||||
cache-prefix:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: Seed used to invalidate caches
|
|
||||||
platform:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform being tested
|
|
||||||
arch:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
nox-version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox version to install
|
|
||||||
timeout-minutes:
|
|
||||||
required: true
|
|
||||||
type: number
|
|
||||||
description: Timeout, in minutes, for the test job
|
|
||||||
gh-actions-python-version:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The python version to run tests with
|
|
||||||
default: "3.10"
|
|
||||||
fips:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
description: Test run with FIPS enabled
|
|
||||||
package-name:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The onedir package name to use
|
|
||||||
default: salt
|
|
||||||
skip-code-coverage:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
description: Skip code coverage
|
|
||||||
default: false
|
|
||||||
workflow-slug:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: Which workflow is running.
|
|
||||||
default: ci
|
|
||||||
|
|
||||||
env:
|
|
||||||
COLUMNS: 190
|
|
||||||
AWS_MAX_ATTEMPTS: "10"
|
|
||||||
AWS_RETRY_MODE: "adaptive"
|
|
||||||
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
|
||||||
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
|
||||||
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Test Matrix
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
|
|
||||||
build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
- name: Generate Test Matrix
|
|
||||||
id: generate-matrix
|
|
||||||
run: |
|
|
||||||
tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
timeout-minutes: ${{ inputs.timeout-minutes }}
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
|
|
||||||
env:
|
|
||||||
SALT_TRANSPORT: ${{ matrix.transport }}
|
|
||||||
TEST_GROUP: ${{ matrix.test-group || 1 }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: "Set `TIMESTAMP` environment variable"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Salt Version
|
|
||||||
run: |
|
|
||||||
echo "${{ inputs.salt-version }}" > salt/_version.txt
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Decompress Onedir Tarball
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
|
||||||
cd artifacts
|
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
|
|
||||||
- name: Download nox.windows.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: nox-windows-${{ inputs.arch }}-${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: PyPi Proxy
|
|
||||||
run: |
|
|
||||||
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Download testrun-changed-files.txt
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-changed-files.txt
|
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Start VM
|
|
||||||
id: spin-up-vm
|
|
||||||
env:
|
|
||||||
TESTS_CHUNK: ${{ matrix.tests-chunk }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: List Free Space
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
|
|
||||||
|
|
||||||
- name: Upload Checkout To VM
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm rsync ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Decompress .nox Directory
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Show System Info
|
|
||||||
run: |
|
|
||||||
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
|
|
||||||
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Changed Tests
|
|
||||||
id: run-fast-changed-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --core-tests --slow-tests --suppress-no-test-exit-code \
|
|
||||||
--from-filenames=testrun-changed-files.txt
|
|
||||||
|
|
||||||
- name: Run Fast Tests
|
|
||||||
id: run-fast-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
|
|
||||||
${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Slow Tests
|
|
||||||
id: run-slow-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
|
|
||||||
|
|
||||||
- name: Run Core Tests
|
|
||||||
id: run-core-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
|
|
||||||
|
|
||||||
- name: Run Flaky Tests
|
|
||||||
id: run-flaky-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
|
|
||||||
|
|
||||||
- name: Run Full Tests
|
|
||||||
id: run-full-tests
|
|
||||||
if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
|
||||||
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
|
|
||||||
-E TEST_GROUP ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
|
|
||||||
--test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
|
|
||||||
|
|
||||||
- name: Combine Coverage Reports
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-artifacts-from-vm
|
|
||||||
if: always() && steps.spin-up-vm.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
|
|
||||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
|
||||||
# from it showing in the tree command below
|
|
||||||
rm -rf artifacts/salt*
|
|
||||||
tree -a artifacts
|
|
||||||
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
|
|
||||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Destroy VM
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
|
|
||||||
|
|
||||||
- name: Upload Code Coverage Test Run Artifacts
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/coverage/
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
- name: Upload JUnit XML Test Run Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/xml-unittests-output/
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
- name: Upload Test Run Log Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/logs
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
|
|
||||||
report:
|
|
||||||
name: Test Reports
|
|
||||||
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- test
|
|
||||||
- generate-matrix
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Merge JUnit XML Test Run Artifacts
|
|
||||||
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Merge Log Test Run Artifacts
|
|
||||||
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Merge Code Coverage Test Run Artifacts
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
|
|
||||||
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
|
|
||||||
separate-directories: false
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Download Code Coverage Test Run Artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
id: download-coverage-artifacts
|
|
||||||
with:
|
|
||||||
path: artifacts/coverage/
|
|
||||||
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
- name: Show Downloaded Test Run Artifacts
|
|
||||||
if: ${{ inputs.skip-code-coverage == false }}
|
|
||||||
run: |
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Install Nox
|
|
||||||
run: |
|
|
||||||
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
|
||||||
|
|
||||||
- name: Create XML Coverage Reports
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
|
|
||||||
run: |
|
|
||||||
nox --force-color -e create-xml-coverage-reports
|
|
||||||
mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml || true
|
|
||||||
mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml || true
|
|
||||||
|
|
||||||
- name: Report Salt Code Coverage
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
nox --force-color -e report-coverage -- salt
|
|
||||||
|
|
||||||
- name: Report Combined Code Coverage
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
nox --force-color -e report-coverage
|
|
||||||
|
|
||||||
- name: Rename Code Coverage DB
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Upload Code Coverage DB
|
|
||||||
if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
|
|
||||||
path: artifacts/coverage
|
|
||||||
include-hidden-files: true
|
|
1392
.github/workflows/test-action.yml
vendored
Normal file
1392
.github/workflows/test-action.yml
vendored
Normal file
File diff suppressed because it is too large
Load diff
|
@ -88,9 +88,7 @@ jobs:
|
||||||
needs:
|
needs:
|
||||||
- generate-matrix
|
- generate-matrix
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- ubuntu-latest
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
environment: ${{ inputs.environment }}
|
environment: ${{ inputs.environment }}
|
||||||
|
@ -497,9 +495,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
USE_S3_CACHE: 'true'
|
USE_S3_CACHE: 'true'
|
||||||
runs-on:
|
runs-on:
|
||||||
- self-hosted
|
- ubuntu-latest
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
environment: ${{ inputs.environment }}
|
environment: ${{ inputs.environment }}
|
||||||
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
|
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
|
||||||
strategy:
|
strategy:
|
||||||
|
|
279
.github/workflows/test-packages-action-linux.yml
vendored
279
.github/workflows/test-packages-action-linux.yml
vendored
|
@ -1,279 +0,0 @@
|
||||||
name: Test Artifact
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
distro-slug:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The OS slug to run tests against
|
|
||||||
platform:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform being tested
|
|
||||||
arch:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
pkg-type:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
salt-version:
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
description: The Salt version of the packages to install and test
|
|
||||||
cache-prefix:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: Seed used to invalidate caches
|
|
||||||
testing-releases:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: A JSON list of releases to test upgrades against
|
|
||||||
nox-version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox version to install
|
|
||||||
python-version:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The python version to run tests with
|
|
||||||
default: "3.10"
|
|
||||||
fips:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
description: Test run with FIPS enabled
|
|
||||||
package-name:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The onedir package name to use
|
|
||||||
default: salt
|
|
||||||
nox-session:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The nox session to run
|
|
||||||
default: ci-test-onedir
|
|
||||||
skip-code-coverage:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
description: Skip code coverage
|
|
||||||
default: false
|
|
||||||
|
|
||||||
env:
|
|
||||||
COLUMNS: 190
|
|
||||||
AWS_MAX_ATTEMPTS: "10"
|
|
||||||
AWS_RETRY_MODE: "adaptive"
|
|
||||||
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
|
||||||
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
|
||||||
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
|
||||||
USE_S3_CACHE: 'true'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Generate Matrix
|
|
||||||
runs-on:
|
|
||||||
# We need to run on our self-hosted runners because we need proper credentials
|
|
||||||
# for boto3 to scan through our repositories.
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- x86_64
|
|
||||||
outputs:
|
|
||||||
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
|
|
||||||
build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Generate Package Test Matrix
|
|
||||||
id: generate-pkg-matrix
|
|
||||||
run: |
|
|
||||||
tools ci pkg-matrix ${{ inputs.distro-slug }} \
|
|
||||||
${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
|
|
||||||
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: "Set `TIMESTAMP` environment variable"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Download Packages
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
|
|
||||||
path: artifacts/pkg/
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Decompress Onedir Tarball
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
|
||||||
cd artifacts
|
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
|
|
||||||
- name: List Packages
|
|
||||||
run: |
|
|
||||||
tree artifacts/pkg/
|
|
||||||
|
|
||||||
- name: Download nox.linux.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: nox-linux-${{ inputs.arch }}-${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Start VM
|
|
||||||
id: spin-up-vm
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: List Free Space
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
|
|
||||||
|
|
||||||
- name: Upload Checkout To VM
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm rsync ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Decompress .nox Directory
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Show System Info
|
|
||||||
run: |
|
|
||||||
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
|
|
||||||
--nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Package Tests
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ inputs.fips && '--fips ' || '' }}\
|
|
||||||
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \
|
|
||||||
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-artifacts-from-vm
|
|
||||||
if: always() && steps.spin-up-vm.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
|
|
||||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
|
||||||
# from it showing in the tree command below
|
|
||||||
rm -rf artifacts/salt*
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Destroy VM
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
|
|
||||||
|
|
||||||
- name: Upload Test Run Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/
|
|
||||||
!artifacts/pkg/*
|
|
||||||
!artifacts/salt/*
|
|
||||||
!artifacts/salt-*.tar.*
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
report:
|
|
||||||
name: Report
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
- test
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Wait For Artifacts
|
|
||||||
run: |
|
|
||||||
sleep 60
|
|
||||||
|
|
||||||
- name: Merge Test Run Artifacts
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}
|
|
||||||
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}-*
|
|
||||||
separate-directories: true
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Wait For Artifacts 2
|
|
||||||
run: |
|
|
||||||
sleep 60
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-test-run-artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
path: artifacts/
|
|
||||||
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
|
|
||||||
- name: Show Test Run Artifacts
|
|
||||||
if: always() && steps.download-test-run-artifacts.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tree -a artifacts
|
|
280
.github/workflows/test-packages-action-macos.yml
vendored
280
.github/workflows/test-packages-action-macos.yml
vendored
|
@ -1,280 +0,0 @@
|
||||||
name: Test Artifact
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
distro-slug:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The OS slug to run tests against
|
|
||||||
runner:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The GitHub runner name
|
|
||||||
platform:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform being tested
|
|
||||||
arch:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
pkg-type:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The type of package being tested
|
|
||||||
salt-version:
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
description: The Salt version of the packages to install and test
|
|
||||||
cache-prefix:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: Seed used to invalidate caches
|
|
||||||
testing-releases:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: A JSON list of releases to test upgrades against
|
|
||||||
nox-version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox version to install
|
|
||||||
python-version:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The python version to run tests with
|
|
||||||
default: "3.10"
|
|
||||||
package-name:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The onedir package name to use
|
|
||||||
default: salt
|
|
||||||
nox-session:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The nox session to run
|
|
||||||
default: ci-test-onedir
|
|
||||||
skip-code-coverage:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
description: Skip code coverage
|
|
||||||
default: false
|
|
||||||
|
|
||||||
env:
|
|
||||||
COLUMNS: 190
|
|
||||||
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
|
||||||
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
|
||||||
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Generate Matrix
|
|
||||||
runs-on:
|
|
||||||
# We need to run on our self-hosted runners because we need proper credentials
|
|
||||||
# for boto3 to scan through our repositories.
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- x86_64
|
|
||||||
outputs:
|
|
||||||
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
|
|
||||||
build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Generate Package Test Matrix
|
|
||||||
id: generate-pkg-matrix
|
|
||||||
run: |
|
|
||||||
tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
|
|
||||||
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test
|
|
||||||
runs-on: ${{ inputs.runner }}
|
|
||||||
timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time)
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: "Set `TIMESTAMP` environment variable"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Download Packages
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
|
|
||||||
path: artifacts/pkg/
|
|
||||||
|
|
||||||
- name: Install System Dependencies
|
|
||||||
run: |
|
|
||||||
brew install tree
|
|
||||||
|
|
||||||
- name: List Packages
|
|
||||||
run: |
|
|
||||||
tree artifacts/pkg/
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Decompress Onedir Tarball
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
|
||||||
cd artifacts
|
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
|
|
||||||
- name: Set up Python ${{ inputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "${{ inputs.python-version }}"
|
|
||||||
|
|
||||||
- name: Install Nox
|
|
||||||
run: |
|
|
||||||
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
||||||
|
|
||||||
- name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: nox-macos-${{ inputs.arch }}-${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Decompress .nox Directory
|
|
||||||
run: |
|
|
||||||
nox --force-color -e decompress-dependencies -- macos ${{ inputs.arch }}
|
|
||||||
|
|
||||||
- name: Show System Info
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_SYSTEM_INFO_ONLY: "1"
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Package Tests
|
|
||||||
env:
|
|
||||||
SKIP_REQUIREMENTS_INSTALL: "1"
|
|
||||||
PRINT_TEST_SELECTION: "0"
|
|
||||||
PRINT_TEST_PLAN_ONLY: "0"
|
|
||||||
PRINT_SYSTEM_INFO: "0"
|
|
||||||
RERUN_FAILURES: "1"
|
|
||||||
GITHUB_ACTIONS_PIPELINE: "1"
|
|
||||||
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
|
||||||
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
|
|
||||||
run: |
|
|
||||||
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
|
|
||||||
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
|
|
||||||
|
|
||||||
- name: Fix file ownership
|
|
||||||
run: |
|
|
||||||
sudo chown -R "$(id -un)" .
|
|
||||||
|
|
||||||
- name: Prepare Test Run Artifacts
|
|
||||||
id: download-artifacts-from-vm
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
|
||||||
# from it showing in the tree command below
|
|
||||||
rm -rf artifacts/salt*
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Upload Test Run Artifacts
|
|
||||||
if: always()
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/
|
|
||||||
!artifacts/pkg/*
|
|
||||||
!artifacts/salt/*
|
|
||||||
!artifacts/salt-*.tar.*
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
report:
|
|
||||||
name: Report
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
- test
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Wait For Artifacts
|
|
||||||
run: |
|
|
||||||
sleep 60
|
|
||||||
|
|
||||||
- name: Merge Test Run Artifacts
|
|
||||||
continue-on-error: true
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
with:
|
|
||||||
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
|
|
||||||
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-*
|
|
||||||
separate-directories: true
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Wait For Artifacts 2
|
|
||||||
run: |
|
|
||||||
sleep 60
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-test-run-artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
path: artifacts/
|
|
||||||
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
- name: Show Test Run Artifacts
|
|
||||||
if: always() && steps.download-test-run-artifacts.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Set up Python ${{ inputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "${{ inputs.python-version }}"
|
|
||||||
|
|
||||||
- name: Install Nox
|
|
||||||
run: |
|
|
||||||
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
|
||||||
env:
|
|
||||||
PIP_INDEX_URL: https://pypi.org/simple
|
|
277
.github/workflows/test-packages-action-windows.yml
vendored
277
.github/workflows/test-packages-action-windows.yml
vendored
|
@ -1,277 +0,0 @@
|
||||||
name: Test Artifact
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
distro-slug:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The OS slug to run tests against
|
|
||||||
platform:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform being tested
|
|
||||||
arch:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The platform arch being tested
|
|
||||||
pkg-type:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The type of package being tested
|
|
||||||
salt-version:
|
|
||||||
type: string
|
|
||||||
required: true
|
|
||||||
description: The Salt version of the packages to install and test
|
|
||||||
cache-prefix:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: Seed used to invalidate caches
|
|
||||||
testing-releases:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: A JSON list of releases to test upgrades against
|
|
||||||
nox-version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
description: The nox version to install
|
|
||||||
python-version:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The python version to run tests with
|
|
||||||
default: "3.10"
|
|
||||||
fips:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
description: Test run with FIPS enabled
|
|
||||||
package-name:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The onedir package name to use
|
|
||||||
default: salt
|
|
||||||
nox-session:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
description: The nox session to run
|
|
||||||
default: ci-test-onedir
|
|
||||||
skip-code-coverage:
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
description: Skip code coverage
|
|
||||||
default: false
|
|
||||||
|
|
||||||
env:
|
|
||||||
COLUMNS: 190
|
|
||||||
AWS_MAX_ATTEMPTS: "10"
|
|
||||||
AWS_RETRY_MODE: "adaptive"
|
|
||||||
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
|
||||||
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
|
||||||
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
|
||||||
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
|
||||||
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
generate-matrix:
|
|
||||||
name: Generate Matrix
|
|
||||||
runs-on:
|
|
||||||
# We need to run on our self-hosted runners because we need proper credentials
|
|
||||||
# for boto3 to scan through our repositories.
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- x86_64
|
|
||||||
outputs:
|
|
||||||
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
|
|
||||||
build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Generate Package Test Matrix
|
|
||||||
id: generate-pkg-matrix
|
|
||||||
run: |
|
|
||||||
tools ci pkg-matrix ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
|
|
||||||
${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
|
|
||||||
|
|
||||||
|
|
||||||
test:
|
|
||||||
name: Test
|
|
||||||
runs-on:
|
|
||||||
- self-hosted
|
|
||||||
- linux
|
|
||||||
- bastion
|
|
||||||
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: "Set `TIMESTAMP` environment variable"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Download Packages
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
|
|
||||||
path: artifacts/pkg/
|
|
||||||
|
|
||||||
- name: Download Onedir Tarball as an Artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
path: artifacts/
|
|
||||||
|
|
||||||
- name: Decompress Onedir Tarball
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
|
||||||
cd artifacts
|
|
||||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
|
||||||
|
|
||||||
- name: List Packages
|
|
||||||
run: |
|
|
||||||
tree artifacts/pkg/
|
|
||||||
|
|
||||||
- name: Download nox.windows.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: nox-windows-${{ inputs.arch }}-${{ inputs.nox-session }}
|
|
||||||
|
|
||||||
- name: Setup Python Tools Scripts
|
|
||||||
uses: ./.github/actions/setup-python-tools-scripts
|
|
||||||
with:
|
|
||||||
cache-prefix: ${{ inputs.cache-prefix }}
|
|
||||||
|
|
||||||
- name: Get Salt Project GitHub Actions Bot Environment
|
|
||||||
run: |
|
|
||||||
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
|
|
||||||
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
|
|
||||||
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
- name: Start VM
|
|
||||||
id: spin-up-vm
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: List Free Space
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
|
|
||||||
|
|
||||||
- name: Upload Checkout To VM
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm rsync ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Decompress .nox Directory
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
|
|
||||||
|
|
||||||
- name: Show System Info
|
|
||||||
run: |
|
|
||||||
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
|
|
||||||
--nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }}
|
|
||||||
|
|
||||||
- name: Run Package Tests
|
|
||||||
run: |
|
|
||||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ matrix.fips && '--fips ' || '' }}\
|
|
||||||
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \
|
|
||||||
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-artifacts-from-vm
|
|
||||||
if: always() && steps.spin-up-vm.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
|
|
||||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
|
||||||
# from it showing in the tree command below
|
|
||||||
rm -rf artifacts/salt*
|
|
||||||
tree -a artifacts
|
|
||||||
|
|
||||||
- name: Destroy VM
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
|
|
||||||
|
|
||||||
- name: Upload Test Run Artifacts
|
|
||||||
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
|
|
||||||
path: |
|
|
||||||
artifacts/
|
|
||||||
!artifacts/pkg/*
|
|
||||||
!artifacts/salt/*
|
|
||||||
!artifacts/salt-*.tar.*
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
report:
|
|
||||||
name: Report
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
|
|
||||||
needs:
|
|
||||||
- generate-matrix
|
|
||||||
- test
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout Source Code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: "Throttle Builds"
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
|
||||||
|
|
||||||
- name: Wait For Artifacts
|
|
||||||
run: |
|
|
||||||
sleep 60
|
|
||||||
|
|
||||||
- name: Merge Test Run Artifacts
|
|
||||||
uses: actions/upload-artifact/merge@v4
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
|
|
||||||
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-*
|
|
||||||
separate-directories: true
|
|
||||||
delete-merged: true
|
|
||||||
|
|
||||||
- name: Wait For Artifacts 2
|
|
||||||
run: |
|
|
||||||
sleep 60
|
|
||||||
|
|
||||||
- name: Download Test Run Artifacts
|
|
||||||
id: download-test-run-artifacts
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
path: artifacts/
|
|
||||||
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
- name: Show Test Run Artifacts
|
|
||||||
if: always() && steps.download-test-run-artifacts.outcome == 'success'
|
|
||||||
run: |
|
|
||||||
tree -a artifacts
|
|
503
.github/workflows/test-packages-action.yml
vendored
Normal file
503
.github/workflows/test-packages-action.yml
vendored
Normal file
|
@ -0,0 +1,503 @@
|
||||||
|
---
|
||||||
|
name: Test Packages
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
salt-version:
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: The Salt version of the packages to install and test
|
||||||
|
cache-prefix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Seed used to invalidate caches
|
||||||
|
testing-releases:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: A JSON list of releases to test upgrades against
|
||||||
|
nox-version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: The nox version to install
|
||||||
|
python-version:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
description: The python version to run tests with
|
||||||
|
default: "3.10"
|
||||||
|
nox-session:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
description: The nox session to run
|
||||||
|
default: ci-test-onedir
|
||||||
|
skip-code-coverage:
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
description: Skip code coverage
|
||||||
|
default: false
|
||||||
|
package-name:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
description: The onedir package name to use
|
||||||
|
default: salt
|
||||||
|
matrix:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
linux_arm_runner:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
description: Json job matrix config
|
||||||
|
|
||||||
|
env:
|
||||||
|
COLUMNS: 190
|
||||||
|
AWS_MAX_ATTEMPTS: "10"
|
||||||
|
AWS_RETRY_MODE: "adaptive"
|
||||||
|
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
|
||||||
|
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
|
||||||
|
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
|
||||||
|
PIP_DISABLE_PIP_VERSION_CHECK: "1"
|
||||||
|
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
|
||||||
|
USE_S3_CACHE: 'false'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
|
||||||
|
test-linux:
|
||||||
|
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
|
||||||
|
runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
|
||||||
|
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: "Throttle Builds"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
||||||
|
|
||||||
|
- name: "Set `TIMESTAMP` environment variable"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
||||||
|
|
||||||
|
- name: Checkout Source Code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download Packages
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ matrix.arch }}-${{ matrix.pkg_type }}
|
||||||
|
path: artifacts/pkg/
|
||||||
|
|
||||||
|
- name: Download Onedir Tarball as an Artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
|
||||||
|
path: artifacts/
|
||||||
|
|
||||||
|
- name: Decompress Onedir Tarball
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
||||||
|
cd artifacts
|
||||||
|
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
|
||||||
|
|
||||||
|
- name: Set up Python ${{ inputs.python-version }}
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "${{ inputs.python-version }}"
|
||||||
|
|
||||||
|
- name: Install Nox
|
||||||
|
run: |
|
||||||
|
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
||||||
|
env:
|
||||||
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
|
|
||||||
|
- name: List Packages
|
||||||
|
run: |
|
||||||
|
tree artifacts/pkg/
|
||||||
|
|
||||||
|
- name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
|
||||||
|
|
||||||
|
- name: "Pull container ${{ matrix.container }}"
|
||||||
|
run: |
|
||||||
|
docker pull ${{ matrix.container }}
|
||||||
|
|
||||||
|
- name: "Create container ${{ matrix.container }}"
|
||||||
|
run: |
|
||||||
|
/usr/bin/docker create --name ${{ github.run_id }}_salt-test-pkg --workdir /__w/salt/salt --privileged -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true -v "/var/run/docker.sock":"/var/run/docker.sock" -v "/home/runner/work":"/__w" -v "/home/runner/work/_temp":"/__w/_temp" -v "/home/runner/work/_actions":"/__w/_actions" -v "/opt/hostedtoolcache":"/__t" -v "/home/runner/work/_temp/_github_home":"/github/home" -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" --entrypoint "/usr/lib/systemd/systemd" ${{ matrix.container }} --systemd --unit rescue.target
|
||||||
|
|
||||||
|
- name: "Start container ${{ matrix.container }}"
|
||||||
|
run: |
|
||||||
|
/usr/bin/docker start ${{ github.run_id }}_salt-test-pkg
|
||||||
|
|
||||||
|
- name: Decompress .nox Directory
|
||||||
|
run: |
|
||||||
|
docker exec ${{ github.run_id}}_salt-test-pkg python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }}
|
||||||
|
|
||||||
|
- name: Setup Python Tools Scripts
|
||||||
|
uses: ./.github/actions/setup-python-tools-scripts
|
||||||
|
with:
|
||||||
|
cache-prefix: ${{ inputs.cache-prefix }}
|
||||||
|
|
||||||
|
- name: List Free Space
|
||||||
|
run: |
|
||||||
|
df -h || true
|
||||||
|
|
||||||
|
- name: Show System Info
|
||||||
|
env:
|
||||||
|
SKIP_REQUIREMENTS_INSTALL: "1"
|
||||||
|
PRINT_SYSTEM_INFO_ONLY: "1"
|
||||||
|
run: |
|
||||||
|
docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
|
||||||
|
|
||||||
|
- name: Run Package Tests
|
||||||
|
env:
|
||||||
|
SKIP_REQUIREMENTS_INSTALL: "1"
|
||||||
|
RERUN_FAILURES: "1"
|
||||||
|
GITHUB_ACTIONS_PIPELINE: "1"
|
||||||
|
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
||||||
|
COVERAGE_CONTEXT: ${{ matrix.slug }}
|
||||||
|
run: |
|
||||||
|
/usr/bin/docker exec ${{ github.run_id }}_salt-test-pkg \
|
||||||
|
python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
|
||||||
|
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
|
||||||
|
|
||||||
|
- name: Upload Test Run Log Artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
|
||||||
|
path: |
|
||||||
|
artifacts/logs
|
||||||
|
include-hidden-files: true
|
||||||
|
|
||||||
|
- name: Upload Test Run Artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
|
||||||
|
path: |
|
||||||
|
artifacts/
|
||||||
|
!artifacts/pkg/*
|
||||||
|
!artifacts/salt/*
|
||||||
|
!artifacts/salt-*.tar.*
|
||||||
|
include-hidden-files: true
|
||||||
|
|
||||||
|
test-macos:
|
||||||
|
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
|
||||||
|
runs-on: ${{ matrix.runner }}
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
|
||||||
|
timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time)
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix)['macos'] }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: "Throttle Builds"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
||||||
|
|
||||||
|
- name: "Set `TIMESTAMP` environment variable"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
||||||
|
|
||||||
|
- name: Checkout Source Code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download Packages
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos
|
||||||
|
path: artifacts/pkg/
|
||||||
|
|
||||||
|
- name: Install System Dependencies
|
||||||
|
run: |
|
||||||
|
brew install tree
|
||||||
|
|
||||||
|
- name: List Packages
|
||||||
|
run: |
|
||||||
|
tree artifacts/pkg/
|
||||||
|
|
||||||
|
- name: Download Onedir Tarball as an Artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
|
||||||
|
path: artifacts/
|
||||||
|
|
||||||
|
- name: Decompress Onedir Tarball
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
||||||
|
cd artifacts
|
||||||
|
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
|
||||||
|
|
||||||
|
- name: Set up Python ${{ inputs.python-version }}
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "${{ inputs.python-version }}"
|
||||||
|
|
||||||
|
- name: Install Nox
|
||||||
|
run: |
|
||||||
|
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
||||||
|
env:
|
||||||
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
|
|
||||||
|
- name: Download nox.macos.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
|
||||||
|
|
||||||
|
- name: Decompress .nox Directory
|
||||||
|
run: |
|
||||||
|
nox --force-color -e decompress-dependencies -- macos ${{ matrix.arch }}
|
||||||
|
|
||||||
|
- name: Show System Info
|
||||||
|
env:
|
||||||
|
SKIP_REQUIREMENTS_INSTALL: "1"
|
||||||
|
PRINT_SYSTEM_INFO_ONLY: "1"
|
||||||
|
run: |
|
||||||
|
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
|
||||||
|
|
||||||
|
- name: Run Package Tests
|
||||||
|
env:
|
||||||
|
SKIP_REQUIREMENTS_INSTALL: "1"
|
||||||
|
PRINT_TEST_SELECTION: "0"
|
||||||
|
PRINT_TEST_PLAN_ONLY: "0"
|
||||||
|
PRINT_SYSTEM_INFO: "0"
|
||||||
|
RERUN_FAILURES: "1"
|
||||||
|
GITHUB_ACTIONS_PIPELINE: "1"
|
||||||
|
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
||||||
|
COVERAGE_CONTEXT: ${{ matrix.slug }}
|
||||||
|
run: |
|
||||||
|
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
|
||||||
|
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
|
||||||
|
|
||||||
|
- name: Fix file ownership
|
||||||
|
run: |
|
||||||
|
sudo chown -R "$(id -un)" .
|
||||||
|
|
||||||
|
- name: Prepare Test Run Artifacts
|
||||||
|
id: download-artifacts-from-vm
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
# Delete the salt onedir, we won't need it anymore and it will prevent
|
||||||
|
# from it showing in the tree command below
|
||||||
|
rm -rf artifacts/salt*
|
||||||
|
tree -a artifacts
|
||||||
|
|
||||||
|
- name: Upload Test Run Artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: pkg-testrun-artifacts-${{ matrix.slug }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
|
||||||
|
path: |
|
||||||
|
artifacts/
|
||||||
|
!artifacts/pkg/*
|
||||||
|
!artifacts/salt/*
|
||||||
|
!artifacts/salt-*.tar.*
|
||||||
|
include-hidden-files: true
|
||||||
|
|
||||||
|
|
||||||
|
test-windows:
|
||||||
|
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
|
||||||
|
runs-on: ${{ matrix.slug }}
|
||||||
|
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
|
||||||
|
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix)['windows'] }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Set up Python ${{ inputs.python-version }}
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "${{ inputs.python-version }}"
|
||||||
|
|
||||||
|
- name: "Throttle Builds"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
|
||||||
|
|
||||||
|
|
||||||
|
- name: "Set `TIMESTAMP` environment variable"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
|
||||||
|
|
||||||
|
- name: Checkout Source Code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download Packages
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ matrix.arch }}-${{ matrix.pkg_type }}
|
||||||
|
path: ./artifacts/pkg/
|
||||||
|
|
||||||
|
- name: Download Onedir Tarball as an Artifact
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
|
||||||
|
path: ./artifacts/
|
||||||
|
|
||||||
|
- name: Decompress Onedir Tarball
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
||||||
|
cd artifacts
|
||||||
|
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
|
||||||
|
|
||||||
|
- name: Install Nox
|
||||||
|
run: |
|
||||||
|
python3 -m pip install 'nox==${{ inputs.nox-version }}'
|
||||||
|
env:
|
||||||
|
PIP_INDEX_URL: https://pypi.org/simple
|
||||||
|
|
||||||
|
- run: python3 --version
|
||||||
|
|
||||||
|
- name: Download nox.windows.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: nox-windows-${{ matrix.arch }}-${{ inputs.nox-session }}
|
||||||
|
|
||||||
|
- name: Decompress .nox Directory
|
||||||
|
run: |
|
||||||
|
nox --force-color -e decompress-dependencies -- windows ${{ matrix.arch }}
|
||||||
|
|
||||||
|
- name: List Important Directories
|
||||||
|
run: |
|
||||||
|
dir d:/
|
||||||
|
dir .
|
||||||
|
dir artifacts/
|
||||||
|
dir artifacts/pkg
|
||||||
|
dir .nox/ci-test-onedir/Scripts
|
||||||
|
|
||||||
|
- name: Check onedir python
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
artifacts/salt/Scripts/python.exe --version
|
||||||
|
|
||||||
|
- name: Check nox python
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
.nox/ci-test-onedir/Scripts/python.exe --version
|
||||||
|
|
||||||
|
- name: Show System Info
|
||||||
|
env:
|
||||||
|
SKIP_REQUIREMENTS_INSTALL: "1"
|
||||||
|
SKIP_CODE_COVERAGE: "1"
|
||||||
|
PRINT_SYSTEM_INFO_ONLY: "1"
|
||||||
|
PYTHONUTF8: "1"
|
||||||
|
run: |
|
||||||
|
nox --force-color -f noxfile.py -e "${{ inputs.nox-session }}-pkgs" -- '${{ matrix.tests-chunk }}' --log-cli-level=debug
|
||||||
|
|
||||||
|
- name: Run Package Tests
|
||||||
|
env:
|
||||||
|
SKIP_REQUIREMENTS_INSTALL: "1"
|
||||||
|
PRINT_TEST_SELECTION: "0"
|
||||||
|
PRINT_TEST_PLAN_ONLY: "0"
|
||||||
|
PRINT_SYSTEM_INFO: "0"
|
||||||
|
RERUN_FAILURES: "1"
|
||||||
|
GITHUB_ACTIONS_PIPELINE: "1"
|
||||||
|
SKIP_INITIAL_ONEDIR_FAILURES: "1"
|
||||||
|
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
|
||||||
|
COVERAGE_CONTEXT: ${{ matrix.slug }}
|
||||||
|
OUTPUT_COLUMNS: "190"
|
||||||
|
PYTHONUTF8: "1"
|
||||||
|
run: >
|
||||||
|
nox --force-color -f noxfile.py -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
|
||||||
|
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
|
||||||
|
|
||||||
|
- name: Prepare Test Run Artifacts
|
||||||
|
id: download-artifacts-from-vm
|
||||||
|
if: always()
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Delete the salt onedir, we won't need it anymore and it will prevent
|
||||||
|
# from it showing in the tree command below
|
||||||
|
rm -rf artifacts/salt*
|
||||||
|
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
|
||||||
|
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload Test Run Log Artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
|
||||||
|
path: |
|
||||||
|
artifacts/logs
|
||||||
|
include-hidden-files: true
|
||||||
|
|
||||||
|
- name: Upload Test Run Artifacts
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: pkg-testrun-artifacts-${{ matrix.slug }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
|
||||||
|
path: |
|
||||||
|
artifacts/
|
||||||
|
!artifacts/pkg/*
|
||||||
|
!artifacts/salt/*
|
||||||
|
!artifacts/salt-*.tar.*
|
||||||
|
include-hidden-files: true
|
||||||
|
|
||||||
|
report:
|
||||||
|
name: Report
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
if: ${{ false }}
|
||||||
|
needs:
|
||||||
|
- test-linux
|
||||||
|
- test-macos
|
||||||
|
- test-windows
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJSON(inputs.matrix)['linux'] }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout Source Code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: "Throttle Builds"
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
|
||||||
|
|
||||||
|
- name: Wait For Artifacts
|
||||||
|
run: |
|
||||||
|
sleep 60
|
||||||
|
|
||||||
|
- name: Merge Test Run Artifacts
|
||||||
|
continue-on-error: true
|
||||||
|
uses: actions/upload-artifact/merge@v4
|
||||||
|
with:
|
||||||
|
name: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}
|
||||||
|
pattern: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-*
|
||||||
|
separate-directories: true
|
||||||
|
delete-merged: true
|
||||||
|
|
||||||
|
- name: Wait For Artifacts 2
|
||||||
|
run: |
|
||||||
|
sleep 60
|
||||||
|
|
||||||
|
- name: Download Test Run Artifacts
|
||||||
|
id: download-test-run-artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
path: artifacts/
|
||||||
|
pattern: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Show Test Run Artifacts
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
tree -a artifacts
|
|
@ -59,7 +59,7 @@ repos:
|
||||||
- id: tools
|
- id: tools
|
||||||
alias: generate-workflows
|
alias: generate-workflows
|
||||||
name: Generate GitHub Workflow Templates
|
name: Generate GitHub Workflow Templates
|
||||||
files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/.*)$
|
files: ^(cicd/shared-gh-workflows-context\.yml|tools/utils/__init__.py|tools/precommit/workflows\.py|.github/workflows/.*)$
|
||||||
pass_filenames: false
|
pass_filenames: false
|
||||||
args:
|
args:
|
||||||
- pre-commit
|
- pre-commit
|
||||||
|
|
112
AUTHORS
112
AUTHORS
|
@ -8,114 +8,28 @@ Whos Who in Salt
|
||||||
The Man With the Plan
|
The Man With the Plan
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
Thomas S. Hatch is the main developer of Salt. He is the founder, owner,
|
Thomas S. Hatch is the creator of Salt. He was the founder, owner,
|
||||||
maintainer and lead of the Salt project, as well as author of the majority
|
maintainer that lead Salt project, as well as author of the majority
|
||||||
of the Salt code and documentation.
|
of initial Salt code and documentation.
|
||||||
|
|
||||||
|
SaltStack, Inc. was acquired by VMware in 2020. In 2023, VMware was
|
||||||
|
acquired by Broadcom.
|
||||||
|
|
||||||
|
The Salt Project core team of developers are employed by Broadcom.
|
||||||
|
|
||||||
Documentation System
|
Documentation System
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
The documentation system was put together by Seth House, much of the
|
The initial documentation system was put together by Seth House.
|
||||||
documentation is being maintained by Seth.
|
|
||||||
|
|
||||||
Developers
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Aaron Bull Schaefer <aaron@elasticdog.com>
|
|
||||||
Aaron Toponce <aaron.toponce@gmail.com>
|
|
||||||
Andrew Hammond <andrew.george.hammond@gmail.com>
|
|
||||||
Aditya Kulkarni <adi@saltstack.com>
|
|
||||||
Alexander Pyatkin <asp@thexyz.net>
|
|
||||||
Andre Sachs <andre@sachs.nom.za>
|
|
||||||
Andrew Colin Kissa <andrew@topdog.za.net>
|
|
||||||
Andrew Kuhnhausen <trane@errstr.com>
|
|
||||||
Antti Kaihola <akaihol+github@ambitone.com>
|
|
||||||
archme <archme.mail@gmail.com>
|
|
||||||
Brad Barden <brad@mifflinet.net>
|
|
||||||
Bret Palsson <bretep@gmail.com>
|
|
||||||
Brian Wagner <wags@wagsworld.net>
|
|
||||||
C. R. Oldham <cr@saltstack.com>
|
|
||||||
Carl Loa Odin <carlodin@gmail.com>
|
|
||||||
Carlo Pires <carlopires@gmail.com>
|
|
||||||
Chris Rebert <chris.rebert@hulu.com>
|
|
||||||
Chris Scheller <schelcj@umich.edu>
|
|
||||||
Christer Edwards <christer.edwards@gmail.com>
|
|
||||||
Clint Savage <herlo1@gmail.com>
|
|
||||||
Colton Myers <cmyers@saltstack.com>
|
|
||||||
Corey Quinn <corey@sequestered.net>
|
|
||||||
Corin Kochenower <ckochenower@saltstack.com>
|
|
||||||
Dan Garthwaite <dan@garthwaite.org>
|
|
||||||
Daniel Wallace <danielwallace at gtmanfred dot com>
|
|
||||||
David Boucha <boucha@gmail.com>
|
|
||||||
David Pravec <alekibango@pravec.tk>
|
|
||||||
deutsche
|
|
||||||
Dmitry Kuzmenko <dkuzmenko@saltstack.com>
|
|
||||||
Doug Renn <renn@nestegg.com>
|
|
||||||
Eivind Uggedal <eivind@uggedal.com>
|
|
||||||
epoelke@gmail.com <epoelke@heartflow.com>
|
|
||||||
Eric Poelke <epoelke@gmail.com>
|
|
||||||
Erik Nolte <enolte@beyondoblivion.com>
|
|
||||||
Evan Borgstrom <evan@fatbox.ca>
|
|
||||||
Forrest Alvarez <forrest.alvarez@gmail.com>
|
|
||||||
Fred Reimer <freimer@freimer.org>
|
|
||||||
Henrik Holmboe <henrik@holmboe.se>
|
|
||||||
Gareth J. Greenaway <gareth@wiked.org>
|
|
||||||
Jacob Albretsen <jakea@xmission.com>
|
|
||||||
Jed Glazner <jglazner@coldcrow.com>
|
|
||||||
Jeff Bauer <jbauer@rubic.com>
|
|
||||||
Jeff Hutchins <jhutchins@getjive.com>
|
|
||||||
Jeffrey C. Ollie <jeff@ocjtech.us>
|
|
||||||
Jeff Schroeder <jeffschroeder@computer.org>
|
|
||||||
Johnny Bergström
|
|
||||||
Jonas Buckner <buckner.jonas@gmail.com>
|
|
||||||
Jonathan Harker <k.jonathan.harker@hp.com>
|
|
||||||
Joseph Hall <joseph@saltstack.com>
|
|
||||||
Josmar Dias <josmarnet@gmail.com>
|
|
||||||
Kent Tenney <ktenney@gmail.com>
|
|
||||||
lexual
|
|
||||||
Marat Shakirov
|
|
||||||
Marc Abramowitz <marc+github@marc-abramowitz.com>
|
|
||||||
Martin Schnabel <mb0@mb0.org>
|
|
||||||
Mathieu Le Marec - Pasquet <kiorky@cryptelium.net>
|
|
||||||
Matt Black
|
|
||||||
Matthew Printz <hipokrit@gmail.com>
|
|
||||||
Matthias Teege <matthias-git@mteege.de>
|
|
||||||
Maxim Burgerhout <maxim@wzzrd.com>
|
|
||||||
Mickey Malone <mickey.malone@gmail.com>
|
|
||||||
Michael Steed <msteed@saltstack.com>
|
|
||||||
Mike Place <mp@saltstack.com>
|
|
||||||
Mircea Ulinic <ping@mirceaulinic.net>
|
|
||||||
Mitch Anderson <mitch@metauser.net>
|
|
||||||
Mostafa Hussein <mostafa.hussein91@gmail.com>
|
|
||||||
Nathaniel Whiteinge <seth@eseth.com>
|
|
||||||
Nicolas Delaby <nicolas.delaby@ezeep.com>
|
|
||||||
Nicole Thomas <nicole@saltstack.com>
|
|
||||||
Nigel Owen <nigelowen2.gmail.com>
|
|
||||||
Nitin Madhok <nmadhok@g.clemson.edu>
|
|
||||||
Oleg Anashkin <oleg.anashkin@gmail.com>
|
|
||||||
Pedro Algarvio <pedro@algarvio.me>
|
|
||||||
Peter Baumgartner
|
|
||||||
Pierre Carrier <pierre@spotify.com>
|
|
||||||
Rhys Elsmore <me@rhys.io>
|
|
||||||
Rafael Caricio <rafael@caricio.com>
|
|
||||||
Robert Fielding
|
|
||||||
Sean Channel <pentabular@gmail.com>
|
|
||||||
Seth House <seth@eseth.com>
|
|
||||||
Seth Vidal <skvidal@fedoraproject.org>
|
|
||||||
Stas Alekseev <stas.alekseev@gmail.com>
|
|
||||||
Thibault Cohen <titilambert@gmail.com>
|
|
||||||
Thomas Schreiber <tom@rizumu.us>
|
|
||||||
Thomas S Hatch <thatch45@gmail.com>
|
|
||||||
Tor Hveem <xt@bash.no>
|
|
||||||
Travis Cline <travis.cline@gmail.com>
|
|
||||||
Wieland Hoffmann <themineo+github@gmail.com>
|
|
||||||
|
|
||||||
|
Documentation is now primarily maintained by the Salt Project core team and
|
||||||
|
community members.
|
||||||
|
|
||||||
Growing Community
|
Growing Community
|
||||||
--------------------------------
|
--------------------------------
|
||||||
|
|
||||||
Salt is a rapidly growing project with a large community, to view all
|
Salt is a rapidly growing project with a large community, and has had more than
|
||||||
contributors please check Github, this file can sometimes be out of date:
|
2,400 contributors over the years. To view all contributors, please check Github:
|
||||||
|
|
||||||
https://github.com/saltstack/salt/graphs/contributors
|
https://github.com/saltstack/salt/graphs/contributors
|
||||||
|
|
||||||
|
|
|
@ -60,7 +60,7 @@ representative at an online or offline event.
|
||||||
|
|
||||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
reported to the community leaders responsible for enforcement at
|
reported to the community leaders responsible for enforcement at
|
||||||
conduct@saltstack.com.
|
saltproject.pdl@broadcom.com.
|
||||||
All complaints will be reviewed and investigated promptly and fairly.
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
All community leaders are obligated to respect the privacy and security of the
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
|
|
@ -8,7 +8,7 @@ ways you can help improve Salt:
|
||||||
- Use Salt and report bugs with clear, detailed descriptions.
|
- Use Salt and report bugs with clear, detailed descriptions.
|
||||||
- Join a `working group <https://github.com/saltstack/community>`__ to
|
- Join a `working group <https://github.com/saltstack/community>`__ to
|
||||||
collaborate with other contributors.
|
collaborate with other contributors.
|
||||||
- Answer questions on platforms like `IRC <https://web.libera.chat/#salt>`__,
|
- Answer questions on platforms like
|
||||||
the `community Discord <https://discord.com/invite/J7b7EscrAs>`__,
|
the `community Discord <https://discord.com/invite/J7b7EscrAs>`__,
|
||||||
the `salt-users mailing list <https://groups.google.com/forum/#!forum/salt-users>`__,
|
the `salt-users mailing list <https://groups.google.com/forum/#!forum/salt-users>`__,
|
||||||
`Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
|
`Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
|
||||||
|
@ -135,7 +135,7 @@ Then activate it:
|
||||||
|
|
||||||
Sweet! Now you're ready to clone Salt so you can start hacking away! If
|
Sweet! Now you're ready to clone Salt so you can start hacking away! If
|
||||||
you get stuck at any point, check out the resources at the beginning of
|
you get stuck at any point, check out the resources at the beginning of
|
||||||
this guide. IRC and Discord are particularly helpful places to go.
|
this guide. Discord and GitHub Discussions are particularly helpful places to go.
|
||||||
|
|
||||||
|
|
||||||
Get the source!
|
Get the source!
|
||||||
|
@ -631,7 +631,7 @@ your PR is submitted during the week you should be able to expect some
|
||||||
kind of communication within that business day. If your tests are
|
kind of communication within that business day. If your tests are
|
||||||
passing and we're not in a code freeze, ideally your code will be merged
|
passing and we're not in a code freeze, ideally your code will be merged
|
||||||
that week or month. If you haven't heard from your assigned reviewer, ping them
|
that week or month. If you haven't heard from your assigned reviewer, ping them
|
||||||
on GitHub, `irc <https://web.libera.chat/#salt>`__, or Community Discord.
|
on GitHub or `Community Discord <https://discord.com/invite/J7b7EscrAs>`__.
|
||||||
|
|
||||||
It's likely that your reviewer will leave some comments that need
|
It's likely that your reviewer will leave some comments that need
|
||||||
addressing - it may be a style change, or you forgot a changelog entry,
|
addressing - it may be a style change, or you forgot a changelog entry,
|
||||||
|
|
40
README.rst
40
README.rst
|
@ -6,18 +6,10 @@
|
||||||
:alt: PyPi Package Downloads
|
:alt: PyPi Package Downloads
|
||||||
:target: https://pypi.org/project/salt
|
:target: https://pypi.org/project/salt
|
||||||
|
|
||||||
.. image:: https://img.shields.io/lgtm/grade/python/github/saltstack/salt
|
|
||||||
:alt: PyPi Package Downloads
|
|
||||||
:target: https://lgtm.com/projects/g/saltstack/salt/context:python
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/discord-SaltProject-blue.svg?logo=discord
|
.. image:: https://img.shields.io/badge/discord-SaltProject-blue.svg?logo=discord
|
||||||
:alt: Salt Project Discord Community
|
:alt: Salt Project Discord Community
|
||||||
:target: https://discord.com/invite/J7b7EscrAs
|
:target: https://discord.com/invite/J7b7EscrAs
|
||||||
|
|
||||||
.. image:: https://img.shields.io/twitch/status/saltprojectoss
|
|
||||||
:alt: Salt Project Twitch Channel
|
|
||||||
:target: https://www.twitch.tv/saltprojectoss
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/reddit/subreddit-subscribers/saltstack?style=social
|
.. image:: https://img.shields.io/reddit/subreddit-subscribers/saltstack?style=social
|
||||||
:alt: Salt Project subreddit
|
:alt: Salt Project subreddit
|
||||||
:target: https://www.reddit.com/r/saltstack/
|
:target: https://www.reddit.com/r/saltstack/
|
||||||
|
@ -71,20 +63,21 @@ In addition to configuration management Salt can also:
|
||||||
|
|
||||||
About our sponsors
|
About our sponsors
|
||||||
==================
|
==================
|
||||||
Salt powers VMware's `VMware Aria Automation Config`_
|
|
||||||
(previously vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found
|
Salt powers VMware by Broadcom's `Tanzu Salt`_
|
||||||
|
(previously Aria Automation Config / vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found
|
||||||
under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and
|
under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and
|
||||||
Tieto, to name a few.
|
Tieto, to name a few.
|
||||||
|
|
||||||
The original sponsor of our community, SaltStack, was `acquired by VMware in 2020 <https://www.vmware.com/company/acquisitions/saltstack.html>`_.
|
The original sponsor of our community, SaltStack, was acquired by VMware in 2020.
|
||||||
The Salt Project remains an open source ecosystem that VMware supports and
|
`VMware was later acquired by Broadcom in 2023 <https://investors.broadcom.com/news-releases/news-release-details/broadcom-completes-acquisition-vmware>`__.
|
||||||
contributes to. VMware ensures the code integrity and quality of the Salt
|
The Salt Project remains an open source ecosystem that Broadcom supports and
|
||||||
|
contributes to. Broadcom ensures the code integrity and quality of the Salt
|
||||||
modules by acting as the official sponsor and manager of the Salt project. Many
|
modules by acting as the official sponsor and manager of the Salt project. Many
|
||||||
of the core Salt Project contributors are also VMware employees. This team
|
of the core Salt Project contributors are also Broadcom employees. This team
|
||||||
carefully reviews and enhances the Salt modules to ensure speed, quality, and
|
carefully reviews and enhances the Salt modules to ensure speed, quality, and
|
||||||
security.
|
security.
|
||||||
|
|
||||||
|
|
||||||
Download and install Salt
|
Download and install Salt
|
||||||
=========================
|
=========================
|
||||||
Salt is tested and packaged to run on CentOS, Debian, RHEL, Ubuntu, MacOS,
|
Salt is tested and packaged to run on CentOS, Debian, RHEL, Ubuntu, MacOS,
|
||||||
|
@ -93,9 +86,11 @@ Windows, and more. Download Salt and get started now. See
|
||||||
for more information.
|
for more information.
|
||||||
|
|
||||||
To download and install Salt, see:
|
To download and install Salt, see:
|
||||||
* `The Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/index.html>`_
|
|
||||||
* `Salt Project repository <https://repo.saltproject.io/>`_
|
|
||||||
|
|
||||||
|
* `The Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/index.html>`_
|
||||||
|
* `Salt Project Repository: Linux (RPM) <https://packages.broadcom.com/artifactory/saltproject-rpm>`__ - Where Salt ``rpm`` packages are officially stored and distributed.
|
||||||
|
* `Salt Project Repository: Linux (DEB) <https://packages.broadcom.com/artifactory/saltproject-deb>`__ - Where Salt ``deb`` packages are officially stored and distributed.
|
||||||
|
* `Salt Project Repository: GENERIC <https://packages.broadcom.com/artifactory/saltproject-generic>`__ - Where Salt Windows, macOS, etc. (non-rpm, non-deb) packages are officially stored and distributed.
|
||||||
|
|
||||||
Technical support
|
Technical support
|
||||||
=================
|
=================
|
||||||
|
@ -153,11 +148,9 @@ Please be sure to review our
|
||||||
`Code of Conduct <https://github.com/saltstack/salt/blob/master/CODE_OF_CONDUCT.md>`_.
|
`Code of Conduct <https://github.com/saltstack/salt/blob/master/CODE_OF_CONDUCT.md>`_.
|
||||||
Also, check out some of our community resources including:
|
Also, check out some of our community resources including:
|
||||||
|
|
||||||
* `Salt Project Community Wiki <https://github.com/saltstack/community/wiki>`_
|
|
||||||
* `Salt Project Community Discord`_
|
* `Salt Project Community Discord`_
|
||||||
* `Salt Project: IRC on LiberaChat <https://web.libera.chat/#salt>`_
|
|
||||||
* `Salt Project YouTube channel <https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg>`_
|
* `Salt Project YouTube channel <https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg>`_
|
||||||
* `Salt Project Twitch channel <https://www.twitch.tv/saltprojectoss>`_
|
* `Salt Project Community Notes and Wiki <https://github.com/saltstack/community/>`_
|
||||||
|
|
||||||
There are lots of ways to get involved in our community. Every month, there are
|
There are lots of ways to get involved in our community. Every month, there are
|
||||||
around a dozen opportunities to meet with other contributors and the Salt Core
|
around a dozen opportunities to meet with other contributors and the Salt Core
|
||||||
|
@ -165,10 +158,9 @@ team and collaborate in real time. The best way to keep track is by subscribing
|
||||||
to the **Salt Project Community Events Calendar** on the main
|
to the **Salt Project Community Events Calendar** on the main
|
||||||
`<https://saltproject.io>`_ website.
|
`<https://saltproject.io>`_ website.
|
||||||
|
|
||||||
If you have additional questions, email us at saltproject@vmware.com or reach out
|
If you have additional questions, email us at saltproject.pdl@broadcom.com or reach out
|
||||||
directly to the Community Discord. We'd be glad to have you join our community!
|
directly to the Community Discord. We'd be glad to have you join our community!
|
||||||
|
|
||||||
|
|
||||||
License
|
License
|
||||||
=======
|
=======
|
||||||
Salt is licensed under the Apache 2.0 license. Please
|
Salt is licensed under the Apache 2.0 license. Please
|
||||||
|
@ -181,9 +173,7 @@ A complete list of attributions and dependencies can be found here:
|
||||||
`salt/DEPENDENCIES.md <https://github.com/saltstack/salt/blob/master/DEPENDENCIES.md>`_
|
`salt/DEPENDENCIES.md <https://github.com/saltstack/salt/blob/master/DEPENDENCIES.md>`_
|
||||||
|
|
||||||
.. _Salt Project Community Discord: https://discord.com/invite/J7b7EscrAs
|
.. _Salt Project Community Discord: https://discord.com/invite/J7b7EscrAs
|
||||||
.. _VMware Aria Automation Config: https://www.vmware.com/products/vrealize-automation/saltstack-config.html
|
.. _Tanzu Salt: https://www.vmware.com/products/app-platform/tanzu-salt
|
||||||
.. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/
|
.. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/
|
||||||
.. _Open an issue: https://github.com/saltstack/salt/issues/new/choose
|
.. _Open an issue: https://github.com/saltstack/salt/issues/new/choose
|
||||||
.. _SECURITY.md: https://github.com/saltstack/salt/blob/master/SECURITY.md
|
.. _SECURITY.md: https://github.com/saltstack/salt/blob/master/SECURITY.md
|
||||||
.. _Calendar html: https://outlook.office365.com/owa/calendar/105f69bacd4541baa849529aed37eb2d@vmware.com/434ec2155b2b4cce90144c87f0dd03d56626754050155294962/calendar.html
|
|
||||||
.. _Calendar ics: https://outlook.office365.com/owa/calendar/105f69bacd4541baa849529aed37eb2d@vmware.com/434ec2155b2b4cce90144c87f0dd03d56626754050155294962/calendar.ics
|
|
||||||
|
|
25
SUPPORT.rst
25
SUPPORT.rst
|
@ -1,15 +1,8 @@
|
||||||
Get SaltStack Support and Help
|
Get Salt Project Support and Help
|
||||||
==============================
|
=================================
|
||||||
|
|
||||||
**IRC Chat** - Join the vibrant, helpful and positive SaltStack chat room in
|
**Salt Project Discord** - Join the Salt Project Community Discord!
|
||||||
LiberaChat at #salt. There is no need to introduce yourself, or ask permission
|
Use the following link to join the Discord server:
|
||||||
to join in, just help and be helped! Make sure to wait for an answer, sometimes
|
|
||||||
it may take a few moments for someone to reply.
|
|
||||||
|
|
||||||
`<https://web.libera.chat/#salt>`_
|
|
||||||
|
|
||||||
**SaltStack Slack** - Alongside IRC is our SaltStack Community Discord for the
|
|
||||||
SaltStack Working groups. Use the following link to request an invitation.
|
|
||||||
|
|
||||||
`<https://discord.com/invite/J7b7EscrAs>`_
|
`<https://discord.com/invite/J7b7EscrAs>`_
|
||||||
|
|
||||||
|
@ -20,13 +13,13 @@ anyone can help answer. Join the conversation!
|
||||||
`<https://groups.google.com/forum/#!forum/salt-users>`_
|
`<https://groups.google.com/forum/#!forum/salt-users>`_
|
||||||
|
|
||||||
You may subscribe to the list without a Google account by emailing
|
You may subscribe to the list without a Google account by emailing
|
||||||
salt-users+subscribe@googlegroups.com and you may post to the list by emailing
|
``salt-users+subscribe@googlegroups.com`` and you may post to the list by emailing
|
||||||
salt-users@googlegroups.com
|
``salt-users@googlegroups.com``
|
||||||
|
|
||||||
**Reporting Issues** - To report an issue with Salt, please follow the
|
**Reporting Issues** - To report an issue with Salt, please follow the
|
||||||
guidelines for filing bug reports:
|
guidelines for filing bug reports:
|
||||||
`<https://docs.saltproject.io/en/master/topics/development/reporting_bugs.html>`_
|
`<https://docs.saltproject.io/en/master/topics/development/reporting_bugs.html>`_
|
||||||
|
|
||||||
**SaltStack Support** - If you need dedicated, prioritized support, please
|
**Salt Project Support** - If you need dedicated, prioritized support, please
|
||||||
consider a SaltStack Support package that fits your needs:
|
consider taking a look at the Enterprise product:
|
||||||
`<http://www.saltstack.com/support>`_
|
`Tanzu Salt <https://www.vmware.com/products/app-platform/tanzu-salt>`__
|
||||||
|
|
|
@ -1,13 +1,9 @@
|
||||||
nox_version: "2022.8.7"
|
nox_version: "2022.8.7"
|
||||||
python_version: "3.10.15"
|
python_version: "3.10.15"
|
||||||
relenv_version: "0.17.3"
|
relenv_version: "0.18.0"
|
||||||
release_branches:
|
release_branches:
|
||||||
- "3006.x"
|
- "3006.x"
|
||||||
- "3007.x"
|
- "3007.x"
|
||||||
mandatory_os_slugs:
|
mandatory_os_slugs:
|
||||||
- rockylinux-9
|
- ubuntu-22.04
|
||||||
- amazonlinux-2023-arm64
|
- ubuntu-22.04-arm64
|
||||||
- photonos-5-arm64
|
|
||||||
- macos-12
|
|
||||||
- ubuntu-24.04-arm64
|
|
||||||
- windows-2022
|
|
||||||
|
|
4
doc/_themes/saltstack/layout.html
vendored
4
doc/_themes/saltstack/layout.html
vendored
|
@ -248,8 +248,8 @@
|
||||||
</div>
|
</div>
|
||||||
<div class="footerCol">
|
<div class="footerCol">
|
||||||
<h4>Community</h4>
|
<h4>Community</h4>
|
||||||
<a href="http://saltstack.org">saltstack.org</a>
|
<a href="http://saltproject.io">saltproject.io</a>
|
||||||
<a href="http://docs.saltstack.org/en/latest/">Documentation</a>
|
<a href="http://docs.saltproject.io/en/latest/">Documentation</a>
|
||||||
<!-- <a href="#">Blogs</a> -->
|
<!-- <a href="#">Blogs</a> -->
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
17
doc/_themes/saltstack2/layout.html
vendored
17
doc/_themes/saltstack2/layout.html
vendored
|
@ -163,16 +163,11 @@
|
||||||
<!-- Collect the nav links, forms, and other content for toggling -->
|
<!-- Collect the nav links, forms, and other content for toggling -->
|
||||||
<div class="collapse navbar-collapse" id="navbarCollapse">
|
<div class="collapse navbar-collapse" id="navbarCollapse">
|
||||||
<ul class="nav navbar-nav">
|
<ul class="nav navbar-nav">
|
||||||
<li><a href="/en/latest/">Overview</a></li>
|
<li><a href="/en/latest/">Overview</a></li>
|
||||||
<li><a href="https://docs.saltproject.io/salt/user-guide/en/latest/">Salt User Guide</a></li>
|
<li><a href="https://docs.saltproject.io/salt/user-guide/en/latest/">Salt User Guide</a></li>
|
||||||
<li><a href="/en/latest/contents.html">Documentation</a></li>
|
<li><a href="/en/latest/contents.html">Documentation</a></li>
|
||||||
<li><a href="https://repo.saltproject.io">Downloads</a></li>
|
<li><a href="https://packages.broadcom.com/artifactory/saltproject-generic/">Downloads</a></li>
|
||||||
<li><a href="/en/latest/topics/development/">Develop</a></li>
|
<li><a href="/en/latest/topics/development/">Develop</a></li>
|
||||||
<!--<li><a href="/en/2016.3/faq/">FAQ</a></li>
|
|
||||||
<li><a href="/en/2016.3/samples/">Code Samples</a></li>-->
|
|
||||||
<!-- <li><a href="https://repo.saltproject.io" target="_blank">Downloads</a></li>-->
|
|
||||||
<!--<li><a href="http://saltstack.com/training" target="_blank">Training</a></li>
|
|
||||||
<li><a href="http://saltstack.com/support" target="_blank">Support</a></li>-->
|
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@ -295,7 +290,7 @@
|
||||||
{% if on_saltstack %}
|
{% if on_saltstack %}
|
||||||
{#
|
{#
|
||||||
{% if [True, False]|random %}
|
{% if [True, False]|random %}
|
||||||
<a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
|
<a href="http://saltproject.io" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
|
||||||
{% else %}
|
{% else %}
|
||||||
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
|
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
|
||||||
{% endif %} #}-->
|
{% endif %} #}-->
|
||||||
|
|
14
doc/conf.py
14
doc/conf.py
|
@ -182,20 +182,6 @@ rst_prolog = """\
|
||||||
.. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce
|
.. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce
|
||||||
.. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers
|
.. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers
|
||||||
.. _`salt-discord`: https://discord.com/invite/J7b7EscrAs
|
.. _`salt-discord`: https://discord.com/invite/J7b7EscrAs
|
||||||
.. |windownload| raw:: html
|
|
||||||
|
|
||||||
<p>Python3 x86: <a
|
|
||||||
href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-x86-Setup.exe"><strong>Salt-Minion-{release}-x86-Setup.exe</strong></a>
|
|
||||||
| <a href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-x86-Setup.exe.md5"><strong>md5</strong></a></p>
|
|
||||||
|
|
||||||
<p>Python3 AMD64: <a
|
|
||||||
href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe"><strong>Salt-Minion-{release}-AMD64-Setup.exe</strong></a>
|
|
||||||
| <a href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe.md5"><strong>md5</strong></a></p>
|
|
||||||
|
|
||||||
.. |osxdownloadpy3| raw:: html
|
|
||||||
|
|
||||||
<p>x86_64: <a href="https://repo.saltproject.io/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
|
|
||||||
| <a href="https://repo.saltproject.io/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
|
|
||||||
|
|
||||||
""".format(
|
""".format(
|
||||||
release=stripped_release
|
release=stripped_release
|
||||||
|
|
|
@ -30,7 +30,7 @@ SaltStack the company does make proprietary products which use Salt and its libr
|
||||||
I think I found a bug! What should I do?
|
I think I found a bug! What should I do?
|
||||||
----------------------------------------
|
----------------------------------------
|
||||||
|
|
||||||
The salt-users mailing list as well as the salt IRC channel can both be helpful
|
The salt-users mailing list as well as the Community Discord can both be helpful
|
||||||
resources to confirm if others are seeing the issue and to assist with
|
resources to confirm if others are seeing the issue and to assist with
|
||||||
immediate debugging.
|
immediate debugging.
|
||||||
|
|
||||||
|
|
|
@ -146,10 +146,8 @@ Before installing the delta proxy minion, ensure that:
|
||||||
Install or upgrade Salt
|
Install or upgrade Salt
|
||||||
-----------------------
|
-----------------------
|
||||||
Ensure your Salt masters are running at least Salt version 3004. For instructions
|
Ensure your Salt masters are running at least Salt version 3004. For instructions
|
||||||
on installing or upgrading Salt, see `repo.saltproject.io
|
on installing or upgrading Salt, see the
|
||||||
<http://repo.saltproject.io/>`_. For RedHat systems, see `Install or Upgrade Salt
|
`Salt Install Guide <https://docs.saltproject.io/salt/install-guide/en/latest/>`__.
|
||||||
<https://enterprise.saltproject.io/en/latest/docs/install-salt.html>`_.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
.. _delta-proxy-install:
|
.. _delta-proxy-install:
|
||||||
|
|
|
@ -539,8 +539,6 @@ the module using the following field lists:
|
||||||
|
|
||||||
.. code-block:: text
|
.. code-block:: text
|
||||||
|
|
||||||
:maintainer: Thomas Hatch <thatch@saltstack.com, Seth House <shouse@saltstack.com>
|
|
||||||
:maturity: new
|
|
||||||
:depends: python-mysqldb
|
:depends: python-mysqldb
|
||||||
:platform: all
|
:platform: all
|
||||||
|
|
||||||
|
|
|
@ -31,6 +31,21 @@ which Salt Cloud is running. See
|
||||||
and using the Salt Minion Windows installer.
|
and using the Salt Minion Windows installer.
|
||||||
|
|
||||||
|
|
||||||
|
Optionally WinRM can be used instead of `winexe` if the python module `pywinrm`
|
||||||
|
is available and WinRM is supported on the target Windows version. Information
|
||||||
|
on pywinrm can be found at the project home:
|
||||||
|
|
||||||
|
* `pywinrm project home`__
|
||||||
|
|
||||||
|
.. __: https://github.com/diyan/pywinrm
|
||||||
|
|
||||||
|
Additionally, a copy of the Salt Minion Windows installer must be present on
|
||||||
|
the system on which Salt Cloud is running. This installer may be downloaded
|
||||||
|
from saltstack.com:
|
||||||
|
|
||||||
|
* `SaltStack Download Area`__
|
||||||
|
|
||||||
|
.. __: https://packages.broadcom.com/artifactory/saltproject-generic/windows/
|
||||||
|
|
||||||
.. _new-pywinrm:
|
.. _new-pywinrm:
|
||||||
|
|
||||||
|
|
|
@ -221,14 +221,10 @@ The best way to create new Formula repositories for now is to create a
|
||||||
repository in your own account on GitHub and notify a SaltStack employee when
|
repository in your own account on GitHub and notify a SaltStack employee when
|
||||||
it is ready. We will add you to the Contributors team on the
|
it is ready. We will add you to the Contributors team on the
|
||||||
`saltstack-formulas`_ organization and help you transfer the repository over.
|
`saltstack-formulas`_ organization and help you transfer the repository over.
|
||||||
Ping a SaltStack employee on IRC (`#salt`_ on LiberaChat), join the
|
Join the ``#formulas`` channel on the `salt-discord`_
|
||||||
``#formulas`` channel on the `salt-discord`_ (bridged to ``#saltstack-formulas``
|
or send an email to the `salt-users`_ mailing list.
|
||||||
on LiberaChat) or send an email to the `salt-users`_ mailing list. Note that
|
|
||||||
IRC logs are available at http://ngxbot.nginx.org/logs/%23salt/ and archives
|
|
||||||
for FreeNode (up to mid-June 2021) https://logbot-archive.s3.amazonaws.com/freenode/salt.gz
|
|
||||||
and https://logbot-archive.s3.amazonaws.com/freenode/saltstack-formulas.gz.
|
|
||||||
|
|
||||||
There are a lot of repositories in that organization! Team members can manage
|
Team members can manage
|
||||||
which repositories they are subscribed to on GitHub's watching page:
|
which repositories they are subscribed to on GitHub's watching page:
|
||||||
https://github.com/watching.
|
https://github.com/watching.
|
||||||
|
|
||||||
|
@ -246,7 +242,7 @@ your pull request has stayed open for more than a couple days feel free to
|
||||||
"selfie-merge" your own pull request.
|
"selfie-merge" your own pull request.
|
||||||
|
|
||||||
.. _`at-mention`: https://help.github.com/en/github/writing-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams
|
.. _`at-mention`: https://help.github.com/en/github/writing-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams
|
||||||
.. _`#salt`: https://web.libera.chat/#salt
|
.. _`#salt`: https://discord.com/invite/J7b7EscrAs
|
||||||
|
|
||||||
Style
|
Style
|
||||||
-----
|
-----
|
||||||
|
|
|
@ -97,7 +97,7 @@ When adding a new function or state, where possible try to use a
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
If you are uncertain what version should be used, either consult a core
|
If you are uncertain what version should be used, either consult a core
|
||||||
developer in IRC or bring this up when opening your :ref:`pull request
|
developer in the Community Discord or bring this up when opening your :ref:`pull request
|
||||||
<installing-for-development>` and a core developer will let you know what
|
<installing-for-development>` and a core developer will let you know what
|
||||||
version to add. Typically this will be the next element in the `periodic table
|
version to add. Typically this will be the next element in the `periodic table
|
||||||
<https://en.wikipedia.org/wiki/List_of_chemical_elements>`_.
|
<https://en.wikipedia.org/wiki/List_of_chemical_elements>`_.
|
||||||
|
|
30
noxfile.py
30
noxfile.py
|
@ -1284,7 +1284,10 @@ def decompress_dependencies(session):
|
||||||
if not os.path.isabs(resolved_link):
|
if not os.path.isabs(resolved_link):
|
||||||
# Relative symlinks, resolve them
|
# Relative symlinks, resolve them
|
||||||
resolved_link = os.path.join(scan_path, resolved_link)
|
resolved_link = os.path.join(scan_path, resolved_link)
|
||||||
if not os.path.exists(resolved_link):
|
prefix_check = False
|
||||||
|
if platform == "windows":
|
||||||
|
prefix_check = resolved_link.startswith("\\\\?")
|
||||||
|
if not os.path.exists(resolved_link) or prefix_check:
|
||||||
session.log("The symlink %r looks to be broken", resolved_link)
|
session.log("The symlink %r looks to be broken", resolved_link)
|
||||||
# This is a broken link, fix it
|
# This is a broken link, fix it
|
||||||
resolved_link_suffix = resolved_link.split(
|
resolved_link_suffix = resolved_link.split(
|
||||||
|
@ -1839,13 +1842,24 @@ def ci_test_onedir_pkgs(session):
|
||||||
session_warn(session, "Replacing VirtualEnv instance...")
|
session_warn(session, "Replacing VirtualEnv instance...")
|
||||||
|
|
||||||
ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir"
|
ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir"
|
||||||
session._runner.venv = VirtualEnv(
|
if hasattr(session._runner.venv, "venv_or_virtualenv"):
|
||||||
str(ci_test_onedir_path.relative_to(REPO_ROOT)),
|
venv = session._runner.venv.venv_or_virtualenv == "venv"
|
||||||
interpreter=session._runner.func.python,
|
session._runner.venv = VirtualEnv(
|
||||||
reuse_existing=True,
|
str(ci_test_onedir_path.relative_to(REPO_ROOT)),
|
||||||
venv=session._runner.venv.venv_or_virtualenv == "venv",
|
interpreter=session._runner.func.python,
|
||||||
venv_params=session._runner.venv.venv_params,
|
reuse_existing=True,
|
||||||
)
|
venv=venv,
|
||||||
|
venv_params=session._runner.venv.venv_params,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
venv = session._runner.venv.venv_backend in ("venv", "virtualenv")
|
||||||
|
session._runner.venv = VirtualEnv( # pylint: disable=unexpected-keyword-arg
|
||||||
|
str(ci_test_onedir_path.relative_to(REPO_ROOT)),
|
||||||
|
interpreter=session._runner.func.python,
|
||||||
|
reuse_existing=True,
|
||||||
|
venv_backend=session._runner.venv.venv_backend,
|
||||||
|
venv_params=session._runner.venv.venv_params,
|
||||||
|
)
|
||||||
os.environ["VIRTUAL_ENV"] = session._runner.venv.location
|
os.environ["VIRTUAL_ENV"] = session._runner.venv.location
|
||||||
session._runner.venv.create()
|
session._runner.venv.create()
|
||||||
|
|
||||||
|
|
|
@ -86,55 +86,107 @@ fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt..."
|
log "Symlink: Creating symlinks for salt..."
|
||||||
ln -sf "$INSTALL_DIR/salt" "$SBIN_DIR/salt"
|
ln -sf "$INSTALL_DIR/salt" "$SBIN_DIR/salt"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-api..."
|
log "Symlink: Creating symlinks for salt-api..."
|
||||||
ln -sf "$INSTALL_DIR/salt-api" "$SBIN_DIR/salt-api"
|
ln -sf "$INSTALL_DIR/salt-api" "$SBIN_DIR/salt-api"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-api" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-call..."
|
log "Symlink: Creating symlinks for salt-call..."
|
||||||
ln -sf "$INSTALL_DIR/salt-call" "$SBIN_DIR/salt-call"
|
ln -sf "$INSTALL_DIR/salt-call" "$SBIN_DIR/salt-call"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-call" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-cloud..."
|
log "Symlink: Creating symlinks for salt-cloud..."
|
||||||
ln -sf "$INSTALL_DIR/salt-cloud" "$SBIN_DIR/salt-cloud"
|
ln -sf "$INSTALL_DIR/salt-cloud" "$SBIN_DIR/salt-cloud"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-cloud" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-cp..."
|
log "Symlink: Creating symlinks for salt-cp..."
|
||||||
ln -sf "$INSTALL_DIR/salt-cp" "$SBIN_DIR/salt-cp"
|
ln -sf "$INSTALL_DIR/salt-cp" "$SBIN_DIR/salt-cp"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-cp" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-key..."
|
log "Symlink: Creating symlinks for salt-key..."
|
||||||
ln -sf "$INSTALL_DIR/salt-key" "$SBIN_DIR/salt-key"
|
ln -sf "$INSTALL_DIR/salt-key" "$SBIN_DIR/salt-key"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-key" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-master..."
|
log "Symlink: Creating symlinks for salt-master..."
|
||||||
ln -sf "$INSTALL_DIR/salt-master" "$SBIN_DIR/salt-master"
|
ln -sf "$INSTALL_DIR/salt-master" "$SBIN_DIR/salt-master"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-master" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-minion..."
|
log "Symlink: Creating symlinks for salt-minion..."
|
||||||
ln -sf "$INSTALL_DIR/salt-minion" "$SBIN_DIR/salt-minion"
|
ln -sf "$INSTALL_DIR/salt-minion" "$SBIN_DIR/salt-minion"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-minion" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-proxy..."
|
log "Symlink: Creating symlinks for salt-proxy..."
|
||||||
ln -sf "$INSTALL_DIR/salt-proxy" "$SBIN_DIR/salt-proxy"
|
ln -sf "$INSTALL_DIR/salt-proxy" "$SBIN_DIR/salt-proxy"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-proxy" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-run..."
|
log "Symlink: Creating symlinks for salt-run..."
|
||||||
ln -sf "$INSTALL_DIR/salt-run" "$SBIN_DIR/salt-run"
|
ln -sf "$INSTALL_DIR/salt-run" "$SBIN_DIR/salt-run"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-run" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for spm..."
|
log "Symlink: Creating symlinks for spm..."
|
||||||
ln -sf "$INSTALL_DIR/spm" "$SBIN_DIR/spm"
|
ln -sf "$INSTALL_DIR/spm" "$SBIN_DIR/spm"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-spm" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-ssh..."
|
log "Symlink: Creating symlinks for salt-ssh..."
|
||||||
ln -sf "$INSTALL_DIR/salt-ssh" "$SBIN_DIR/salt-ssh"
|
ln -sf "$INSTALL_DIR/salt-ssh" "$SBIN_DIR/salt-ssh"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-ssh" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
log "Symlink: Creating symlinks for salt-syndic..."
|
log "Symlink: Creating symlinks for salt-syndic..."
|
||||||
ln -sf "$INSTALL_DIR/salt-syndic" "$SBIN_DIR/salt-syndic"
|
ln -sf "$INSTALL_DIR/salt-syndic" "$SBIN_DIR/salt-syndic"
|
||||||
log "Symlink: Created Successfully"
|
if [ -f "$SBIN_DIR/salt-syndic" ]; then
|
||||||
|
log "Symlink: Created Successfully"
|
||||||
|
else
|
||||||
|
log "Symlink: Failed to create symlink"
|
||||||
|
fi
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
# Add salt to paths.d
|
# Add salt to paths.d
|
||||||
|
|
|
@ -20,7 +20,7 @@ Version: 0.8.3
|
||||||
Release: 0
|
Release: 0
|
||||||
License: Apache-2.0
|
License: Apache-2.0
|
||||||
Summary: The api for Salt a parallel remote execution system
|
Summary: The api for Salt a parallel remote execution system
|
||||||
Url: http://saltstack.org/
|
Url: http://saltproject.io/
|
||||||
Group: System/Monitoring
|
Group: System/Monitoring
|
||||||
Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
|
Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
|
||||||
Source1: salt-api
|
Source1: salt-api
|
||||||
|
|
|
@ -41,7 +41,7 @@ Release: 0
|
||||||
Summary: A parallel remote execution system
|
Summary: A parallel remote execution system
|
||||||
License: Apache-2.0
|
License: Apache-2.0
|
||||||
Group: System/Monitoring
|
Group: System/Monitoring
|
||||||
Url: http://saltstack.org/
|
Url: http://saltproject.io/
|
||||||
Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
|
Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
|
||||||
Source1: README.SUSE
|
Source1: README.SUSE
|
||||||
Source2: salt-tmpfiles.d
|
Source2: salt-tmpfiles.d
|
||||||
|
|
|
@ -1,6 +1,3 @@
|
||||||
# written by David Pravec
|
|
||||||
# - feel free to /msg alekibango on IRC if you want to talk about this file
|
|
||||||
|
|
||||||
# TODO: check if --config|-c was used and use configured config file for queries
|
# TODO: check if --config|-c was used and use configured config file for queries
|
||||||
# TODO: solve somehow completion for salt -G pythonversion:[tab]
|
# TODO: solve somehow completion for salt -G pythonversion:[tab]
|
||||||
# (not sure what to do with lists)
|
# (not sure what to do with lists)
|
||||||
|
|
|
@ -46,7 +46,7 @@ $NSIS_DIR = "${env:ProgramFiles(x86)}\NSIS"
|
||||||
$NSIS_PLUG_A = "$NSIS_DIR\Plugins\x86-ansi"
|
$NSIS_PLUG_A = "$NSIS_DIR\Plugins\x86-ansi"
|
||||||
$NSIS_PLUG_U = "$NSIS_DIR\Plugins\x86-unicode"
|
$NSIS_PLUG_U = "$NSIS_DIR\Plugins\x86-unicode"
|
||||||
$NSIS_LIB_DIR = "$NSIS_DIR\Include"
|
$NSIS_LIB_DIR = "$NSIS_DIR\Include"
|
||||||
$DEPS_URL = "https://repo.saltproject.io/windows/dependencies"
|
$DEPS_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/nsis"
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
# Start the Script
|
# Start the Script
|
||||||
|
|
|
@ -46,7 +46,7 @@ $WINDOWS_DIR = "$PROJECT_DIR\pkg\windows"
|
||||||
$NSIS_DIR = "$WINDOWS_DIR\nsis"
|
$NSIS_DIR = "$WINDOWS_DIR\nsis"
|
||||||
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
|
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
|
||||||
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
|
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
|
||||||
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64"
|
$SALT_DEP_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64/"
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
# Script Start
|
# Script Start
|
||||||
|
|
|
@ -47,7 +47,7 @@ $NSIS_DIR = "$WINDOWS_DIR\nsis"
|
||||||
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
|
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
|
||||||
$PREREQS_DIR = "$WINDOWS_DIR\prereqs"
|
$PREREQS_DIR = "$WINDOWS_DIR\prereqs"
|
||||||
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
|
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
|
||||||
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64"
|
$SALT_DEP_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64/"
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
# Script Start
|
# Script Start
|
||||||
|
|
|
@ -75,11 +75,11 @@ $ARCH = $(. $PYTHON_BIN -c "import platform; print(platform.architectu
|
||||||
if ( $ARCH -eq "64bit" ) {
|
if ( $ARCH -eq "64bit" ) {
|
||||||
$ARCH = "AMD64"
|
$ARCH = "AMD64"
|
||||||
$ARCH_X = "x64"
|
$ARCH_X = "x64"
|
||||||
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64"
|
$SALT_DEP_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64/"
|
||||||
} else {
|
} else {
|
||||||
$ARCH = "x86"
|
$ARCH = "x86"
|
||||||
$ARCH_X = "x86"
|
$ARCH_X = "x86"
|
||||||
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/32"
|
$SALT_DEP_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/32/"
|
||||||
}
|
}
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
setuptools >= 65.6.3,< 69.0
|
setuptools >= 65.6.3,< 69.0
|
||||||
setuptools-scm < 8.0.0
|
setuptools-scm < 8.0.0
|
||||||
pip >= 23.3,< 24.0
|
pip >= 23.3,< 24.0 ; python_version < '3.12'
|
||||||
|
pip >24 ; python_version >= '3.12'
|
||||||
|
|
|
@ -834,18 +834,19 @@ def zip_(zip_file, sources, template=None, cwd=None, runas=None, zip64=False):
|
||||||
if runas:
|
if runas:
|
||||||
os.seteuid(euid)
|
os.seteuid(euid)
|
||||||
os.setegid(egid)
|
os.setegid(egid)
|
||||||
if exc is not None:
|
if "exc" in vars() or "exc" in globals():
|
||||||
# Wait to raise the exception until euid/egid are restored to avoid
|
if exc is not None:
|
||||||
# permission errors in writing to minion log.
|
# Wait to raise the exception until euid/egid are restored to avoid
|
||||||
if exc == zipfile.LargeZipFile:
|
# permission errors in writing to minion log.
|
||||||
raise CommandExecutionError(
|
if exc == zipfile.LargeZipFile:
|
||||||
"Resulting zip file too large, would require ZIP64 support"
|
raise CommandExecutionError(
|
||||||
"which has not been enabled. Rerun command with zip64=True"
|
"Resulting zip file too large, would require ZIP64 support"
|
||||||
)
|
"which has not been enabled. Rerun command with zip64=True"
|
||||||
else:
|
)
|
||||||
raise CommandExecutionError(
|
else:
|
||||||
f"Exception encountered creating zipfile: {exc}"
|
raise CommandExecutionError(
|
||||||
)
|
f"Exception encountered creating zipfile: {exc}"
|
||||||
|
)
|
||||||
|
|
||||||
return archived_files
|
return archived_files
|
||||||
|
|
||||||
|
|
|
@ -2937,8 +2937,11 @@ def script(
|
||||||
os.chmod(path, 320)
|
os.chmod(path, 320)
|
||||||
os.chown(path, __salt__["file.user_to_uid"](runas), -1)
|
os.chown(path, __salt__["file.user_to_uid"](runas), -1)
|
||||||
|
|
||||||
if salt.utils.platform.is_windows() and shell.lower() != "powershell":
|
if salt.utils.platform.is_windows():
|
||||||
cmd_path = _cmd_quote(path, escape=False)
|
if shell.lower() not in ["powershell", "pwsh"]:
|
||||||
|
cmd_path = _cmd_quote(path, escape=False)
|
||||||
|
else:
|
||||||
|
cmd_path = path
|
||||||
else:
|
else:
|
||||||
cmd_path = _cmd_quote(path)
|
cmd_path = _cmd_quote(path)
|
||||||
|
|
||||||
|
|
|
@ -5,6 +5,7 @@ Module for managing locales on POSIX-like systems.
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import subprocess
|
||||||
|
|
||||||
import salt.utils.locales
|
import salt.utils.locales
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
|
@ -67,6 +68,10 @@ def _localectl_status():
|
||||||
"""
|
"""
|
||||||
if salt.utils.path.which("localectl") is None:
|
if salt.utils.path.which("localectl") is None:
|
||||||
raise CommandExecutionError('Unable to find "localectl"')
|
raise CommandExecutionError('Unable to find "localectl"')
|
||||||
|
else:
|
||||||
|
proc = subprocess.run(["localectl"], check=False, capture_output=True)
|
||||||
|
if b"Failed to connect to bus: No such file or directory" in proc.stderr:
|
||||||
|
raise CommandExecutionError('Command "localectl" is in a degraded state.')
|
||||||
|
|
||||||
ret = {}
|
ret = {}
|
||||||
locale_ctl_out = (__salt__["cmd.run"]("localectl status") or "").strip()
|
locale_ctl_out = (__salt__["cmd.run"]("localectl status") or "").strip()
|
||||||
|
|
|
@ -186,6 +186,7 @@ class TccDB:
|
||||||
self.connection = None
|
self.connection = None
|
||||||
self.ge_mojave_and_catalina = False
|
self.ge_mojave_and_catalina = False
|
||||||
self.ge_bigsur_and_later = False
|
self.ge_bigsur_and_later = False
|
||||||
|
self.ge_sonoma_and_later = False
|
||||||
|
|
||||||
def _check_table_digest(self):
|
def _check_table_digest(self):
|
||||||
# This logic comes from https://github.com/jacobsalmela/tccutil which is
|
# This logic comes from https://github.com/jacobsalmela/tccutil which is
|
||||||
|
@ -201,6 +202,8 @@ class TccDB:
|
||||||
elif digest in ("3d1c2a0e97", "cef70648de"):
|
elif digest in ("3d1c2a0e97", "cef70648de"):
|
||||||
# BigSur and later
|
# BigSur and later
|
||||||
self.ge_bigsur_and_later = True
|
self.ge_bigsur_and_later = True
|
||||||
|
elif digest in ("34abf99d20",):
|
||||||
|
self.ge_sonoma_and_later = True
|
||||||
else:
|
else:
|
||||||
raise CommandExecutionError(
|
raise CommandExecutionError(
|
||||||
f"TCC Database structure unknown for digest '{digest}'"
|
f"TCC Database structure unknown for digest '{digest}'"
|
||||||
|
@ -309,10 +312,56 @@ class TccDB:
|
||||||
(app_id, client_type, auth_value),
|
(app_id, client_type, auth_value),
|
||||||
)
|
)
|
||||||
self.connection.commit()
|
self.connection.commit()
|
||||||
|
elif self.ge_sonoma_and_later:
|
||||||
|
# CREATE TABLE access (
|
||||||
|
# service TEXT NOT NULL,
|
||||||
|
# client TEXT NOT NULL,
|
||||||
|
# client_type INTEGER NOT NULL,
|
||||||
|
# auth_value INTEGER NOT NULL,
|
||||||
|
# auth_reason INTEGER NOT NULL,
|
||||||
|
# auth_version INTEGER NOT NULL,
|
||||||
|
# csreq BLOB,
|
||||||
|
# policy_id INTEGER,
|
||||||
|
# indirect_object_identifier_type INTEGER,
|
||||||
|
# indirect_object_identifier TEXT NOT NULL DEFAULT 'UNUSED',
|
||||||
|
# indirect_object_code_identity BLOB,
|
||||||
|
# flags INTEGER,
|
||||||
|
# last_modified INTEGER NOT NULL DEFAULT (CAST(strftime('%s','now') AS INTEGER)),
|
||||||
|
# pid INTEGER,
|
||||||
|
# pid_version INTEGER,
|
||||||
|
# boot_uuid TEXT NOT NULL DEFAULT 'UNUSED',
|
||||||
|
# last_reminded INTEGER NOT NULL DEFAULT (CAST(strftime('%s','now') AS INTEGER)),
|
||||||
|
# PRIMARY KEY (service, client, client_type, indirect_object_identifier),
|
||||||
|
# FOREIGN KEY (policy_id)
|
||||||
|
self.connection.execute(
|
||||||
|
"""
|
||||||
|
INSERT or REPLACE INTO access VALUES(
|
||||||
|
'kTCCServiceAccessibility',
|
||||||
|
?,
|
||||||
|
?,
|
||||||
|
?,
|
||||||
|
4,
|
||||||
|
1,
|
||||||
|
NULL,
|
||||||
|
NULL,
|
||||||
|
NULL,
|
||||||
|
'UNUSED',
|
||||||
|
NULL,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
'UNUSED',
|
||||||
|
?
|
||||||
|
)
|
||||||
|
""",
|
||||||
|
(app_id, client_type, auth_value, time.time()),
|
||||||
|
)
|
||||||
|
self.connection.commit()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def enabled(self, app_id):
|
def enabled(self, app_id):
|
||||||
if self.ge_bigsur_and_later:
|
if self.ge_bigsur_and_later or self.ge_sonoma_and_later:
|
||||||
column = "auth_value"
|
column = "auth_value"
|
||||||
elif self.ge_mojave_and_catalina:
|
elif self.ge_mojave_and_catalina:
|
||||||
column = "allowed"
|
column = "allowed"
|
||||||
|
@ -328,7 +377,7 @@ class TccDB:
|
||||||
def enable(self, app_id):
|
def enable(self, app_id):
|
||||||
if not self.installed(app_id):
|
if not self.installed(app_id):
|
||||||
return False
|
return False
|
||||||
if self.ge_bigsur_and_later:
|
if self.ge_bigsur_and_later or self.ge_sonoma_and_later:
|
||||||
column = "auth_value"
|
column = "auth_value"
|
||||||
elif self.ge_mojave_and_catalina:
|
elif self.ge_mojave_and_catalina:
|
||||||
column = "allowed"
|
column = "allowed"
|
||||||
|
@ -344,7 +393,7 @@ class TccDB:
|
||||||
def disable(self, app_id):
|
def disable(self, app_id):
|
||||||
if not self.installed(app_id):
|
if not self.installed(app_id):
|
||||||
return False
|
return False
|
||||||
if self.ge_bigsur_and_later:
|
if self.ge_bigsur_and_later or self.ge_sonoma_and_later:
|
||||||
column = "auth_value"
|
column = "auth_value"
|
||||||
elif self.ge_mojave_and_catalina:
|
elif self.ge_mojave_and_catalina:
|
||||||
column = "allowed"
|
column = "allowed"
|
||||||
|
|
|
@ -126,8 +126,8 @@ def _sync(form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None):
|
||||||
def update(version=None):
|
def update(version=None):
|
||||||
"""
|
"""
|
||||||
Update the salt minion from the URL defined in opts['update_url']
|
Update the salt minion from the URL defined in opts['update_url']
|
||||||
VMware, Inc provides the latest builds here:
|
Broadcom, Inc provides the latest builds here:
|
||||||
update_url: https://repo.saltproject.io/windows/
|
update_url: https://packages.broadcom.com/artifactory/saltproject-generic/windows/
|
||||||
|
|
||||||
Be aware that as of 2014-8-11 there's a bug in esky such that only the
|
Be aware that as of 2014-8-11 there's a bug in esky such that only the
|
||||||
latest version available in the update_url can be downloaded and installed.
|
latest version available in the update_url can be downloaded and installed.
|
||||||
|
|
|
@ -12,6 +12,7 @@ Support for reboot, shutdown, etc on POSIX-like systems.
|
||||||
with ``salt`` will work as expected.
|
with ``salt`` will work as expected.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
from datetime import datetime, timedelta, tzinfo
|
from datetime import datetime, timedelta, tzinfo
|
||||||
|
@ -22,6 +23,8 @@ import salt.utils.platform
|
||||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||||
from salt.utils.decorators import depends
|
from salt.utils.decorators import depends
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
__virtualname__ = "system"
|
__virtualname__ = "system"
|
||||||
|
|
||||||
|
|
||||||
|
@ -202,10 +205,10 @@ def _swclock_to_hwclock():
|
||||||
"""
|
"""
|
||||||
res = __salt__["cmd.run_all"](["hwclock", "--systohc"], python_shell=False)
|
res = __salt__["cmd.run_all"](["hwclock", "--systohc"], python_shell=False)
|
||||||
if res["retcode"] != 0:
|
if res["retcode"] != 0:
|
||||||
msg = "hwclock failed to set hardware clock from software clock: {}".format(
|
log.warn(
|
||||||
res["stderr"]
|
"hwclock failed to set hardware clock from software clock: %s",
|
||||||
|
res["stderr"],
|
||||||
)
|
)
|
||||||
raise CommandExecutionError(msg)
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1982,7 +1982,7 @@ if __name__ == "__main__":
|
||||||
L="Centerville",
|
L="Centerville",
|
||||||
O="SaltStack",
|
O="SaltStack",
|
||||||
OU=None,
|
OU=None,
|
||||||
emailAddress="test_system@saltstack.org",
|
emailAddress="test_system@saltproject.io",
|
||||||
)
|
)
|
||||||
create_ca_signed_cert("koji", "test_system")
|
create_ca_signed_cert("koji", "test_system")
|
||||||
create_pkcs12("koji", "test_system", passphrase="test")
|
create_pkcs12("koji", "test_system", passphrase="test")
|
||||||
|
|
|
@ -15,7 +15,7 @@ Configuring the LDAP ext_pillar
|
||||||
===============================
|
===============================
|
||||||
|
|
||||||
The basic configuration is part of the `master configuration
|
The basic configuration is part of the `master configuration
|
||||||
<master-configuration-ext-pillar>`_.
|
<_master-configuration-ext-pillar>`_.
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,7 @@ A flexible renderer that takes a templating engine and a data format
|
||||||
:platform: all
|
:platform: all
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# See http://docs.saltstack.org/en/latest/ref/renderers/all/salt.renderers.stateconf.html
|
# See http://docs.saltproject.io/en/latest/ref/renderers/all/salt.renderers.stateconf.html
|
||||||
# for a guide to using this module.
|
# for a guide to using this module.
|
||||||
#
|
#
|
||||||
# FIXME: I really need to review and simplify this renderer, it's getting out of hand!
|
# FIXME: I really need to review and simplify this renderer, it's getting out of hand!
|
||||||
|
|
|
@ -24,7 +24,7 @@ try:
|
||||||
|
|
||||||
HAS_RANGE = True
|
HAS_RANGE = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
log.error("Unable to load range library")
|
log.warning("Unable to load range library")
|
||||||
# pylint: enable=import-error
|
# pylint: enable=import-error
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -769,7 +769,7 @@ def bootstrap_psexec(
|
||||||
|
|
||||||
installer_url
|
installer_url
|
||||||
URL of minion installer executable. Defaults to the latest version from
|
URL of minion installer executable. Defaults to the latest version from
|
||||||
https://repo.saltproject.io/windows/
|
https://packages.broadcom.com/artifactory/saltproject-generic/windows/
|
||||||
|
|
||||||
username
|
username
|
||||||
Optional user name for login on remote computer.
|
Optional user name for login on remote computer.
|
||||||
|
@ -787,6 +787,9 @@ def bootstrap_psexec(
|
||||||
salt-run manage.bootstrap_psexec hosts='host1,host2' installer_url='http://exampledomain/salt-installer.exe'
|
salt-run manage.bootstrap_psexec hosts='host1,host2' installer_url='http://exampledomain/salt-installer.exe'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
# TODO: Need to make this gets the latest version from the new repo location
|
||||||
|
# TODO: Similar to tests/support/win_installer.py
|
||||||
|
# TODO: Maybe need to move that ^^^^ to a salt util
|
||||||
if not installer_url:
|
if not installer_url:
|
||||||
base_url = "https://repo.saltproject.io/windows/"
|
base_url = "https://repo.saltproject.io/windows/"
|
||||||
source = urllib.request.urlopen(base_url).read()
|
source = urllib.request.urlopen(base_url).read()
|
||||||
|
|
|
@ -99,17 +99,17 @@ Using ``aptkey: False`` with ``key_url`` example:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://repo.saltproject.io/py3/ubuntu/18.04/amd64/latest bionic main:
|
deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://packages.broadcom.com/artifactory/saltproject-deb/ bionic main:
|
||||||
pkgrepo.managed:
|
pkgrepo.managed:
|
||||||
- file: /etc/apt/sources.list.d/salt.list
|
- file: /etc/apt/sources.list.d/salt.list
|
||||||
- key_url: https://repo.saltproject.io/py3/ubuntu/18.04/amd64/latest/salt-archive-keyring.gpg
|
- key_url: https://packages.broadcom.com/artifactory/api/security/keypair/SaltProjectKey/public
|
||||||
- aptkey: False
|
- aptkey: False
|
||||||
|
|
||||||
Using ``aptkey: False`` with ``keyserver`` and ``keyid``:
|
Using ``aptkey: False`` with ``keyserver`` and ``keyid``:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://repo.saltproject.io/py3/ubuntu/18.04/amd64/latest bionic main:
|
deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch=amd64] https://packages.broadcom.com/artifactory/saltproject-deb/ bionic main:
|
||||||
pkgrepo.managed:
|
pkgrepo.managed:
|
||||||
- file: /etc/apt/sources.list.d/salt.list
|
- file: /etc/apt/sources.list.d/salt.list
|
||||||
- keyserver: keyserver.ubuntu.com
|
- keyserver: keyserver.ubuntu.com
|
||||||
|
|
|
@ -76,6 +76,32 @@ def offline(context=None):
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def status(context=None):
|
||||||
|
"""Return True if systemd status succeeds. When False, the system may have
|
||||||
|
been booted with systemd but systemd is in a degraded state.
|
||||||
|
|
||||||
|
.. versionadded:: 3006.0
|
||||||
|
"""
|
||||||
|
contextkey = "salt.utils.systemd.status"
|
||||||
|
if isinstance(context, (dict, salt.loader.context.NamedLoaderContext)):
|
||||||
|
# Can't put this if block on the same line as the above if block,
|
||||||
|
# because it will break the elif below.
|
||||||
|
if contextkey in context:
|
||||||
|
return context[contextkey]
|
||||||
|
elif context is not None:
|
||||||
|
raise SaltInvocationError("context must be a dictionary if passed")
|
||||||
|
proc = subprocess.run(
|
||||||
|
["systemctl", "status"],
|
||||||
|
check=False,
|
||||||
|
capture_output=True,
|
||||||
|
)
|
||||||
|
ret = (
|
||||||
|
b"Failed to get D-Bus connection: No such file or directory" not in proc.stderr
|
||||||
|
)
|
||||||
|
context[contextkey] = ret
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def version(context=None):
|
def version(context=None):
|
||||||
"""
|
"""
|
||||||
Attempts to run systemctl --version. Returns None if unable to determine
|
Attempts to run systemctl --version. Returns None if unable to determine
|
||||||
|
@ -123,7 +149,10 @@ def has_scope(context=None):
|
||||||
_sd_version = version(context)
|
_sd_version = version(context)
|
||||||
if _sd_version is None:
|
if _sd_version is None:
|
||||||
return False
|
return False
|
||||||
return _sd_version >= 205
|
if status(context):
|
||||||
|
return _sd_version >= 205
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
def pid_to_service(pid):
|
def pid_to_service(pid):
|
||||||
|
@ -143,7 +172,10 @@ def _pid_to_service_systemctl(pid):
|
||||||
systemd_cmd = ["systemctl", "--output", "json", "status", str(pid)]
|
systemd_cmd = ["systemctl", "--output", "json", "status", str(pid)]
|
||||||
try:
|
try:
|
||||||
systemd_output = subprocess.run(
|
systemd_output = subprocess.run(
|
||||||
systemd_cmd, check=True, text=True, capture_output=True
|
systemd_cmd,
|
||||||
|
check=True,
|
||||||
|
text=True,
|
||||||
|
capture_output=True,
|
||||||
)
|
)
|
||||||
status_json = salt.utils.json.find_json(systemd_output.stdout)
|
status_json = salt.utils.json.find_json(systemd_output.stdout)
|
||||||
except (ValueError, subprocess.CalledProcessError):
|
except (ValueError, subprocess.CalledProcessError):
|
||||||
|
|
|
@ -16,6 +16,7 @@ import _pytest.logging
|
||||||
import _pytest.skipping
|
import _pytest.skipping
|
||||||
import more_itertools
|
import more_itertools
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytestskipmarkers
|
||||||
|
|
||||||
import salt
|
import salt
|
||||||
import salt._logging
|
import salt._logging
|
||||||
|
@ -426,7 +427,8 @@ def pytest_itemcollected(item):
|
||||||
pytest.fail(
|
pytest.fail(
|
||||||
"The test {!r} appears to be written for pytest but it's not under"
|
"The test {!r} appears to be written for pytest but it's not under"
|
||||||
" {!r}. Please move it there.".format(
|
" {!r}. Please move it there.".format(
|
||||||
item.nodeid, str(PYTESTS_DIR.relative_to(CODE_DIR)), pytrace=False
|
item.nodeid,
|
||||||
|
str(PYTESTS_DIR.relative_to(CODE_DIR)),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -801,6 +803,12 @@ def salt_factories_default_root_dir(salt_factories_default_root_dir):
|
||||||
dictionary, then that's the value used, and not the one returned by
|
dictionary, then that's the value used, and not the one returned by
|
||||||
this fixture.
|
this fixture.
|
||||||
"""
|
"""
|
||||||
|
if os.environ.get("CI") and pytestskipmarkers.utils.platform.is_windows():
|
||||||
|
tempdir = pathlib.Path(
|
||||||
|
os.environ.get("RUNNER_TEMP", r"C:\Windows\Temp")
|
||||||
|
).resolve()
|
||||||
|
return tempdir / "stsuite"
|
||||||
|
|
||||||
return salt_factories_default_root_dir / "stsuite"
|
return salt_factories_default_root_dir / "stsuite"
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -155,7 +155,7 @@ salt/engines/*:
|
||||||
- pytests.unit.engines.test_engines
|
- pytests.unit.engines.test_engines
|
||||||
|
|
||||||
salt/grains/*:
|
salt/grains/*:
|
||||||
- integration.grains.test_custom
|
- pytests.integration.grains.test_custom
|
||||||
|
|
||||||
salt/matchers/*:
|
salt/matchers/*:
|
||||||
- integration.states.test_match
|
- integration.states.test_match
|
||||||
|
|
|
@ -1,23 +0,0 @@
|
||||||
"""
|
|
||||||
Test the core grains
|
|
||||||
"""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from tests.support.case import ModuleCase
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.windows_whitelisted
|
|
||||||
class TestGrainsCore(ModuleCase):
|
|
||||||
"""
|
|
||||||
Test the core grains grains
|
|
||||||
"""
|
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
|
||||||
def test_grains_passed_to_custom_grain(self):
|
|
||||||
"""
|
|
||||||
test if current grains are passed to grains module functions that have a grains argument
|
|
||||||
"""
|
|
||||||
self.assertEqual(
|
|
||||||
self.run_function("grains.get", ["custom_grain_test"]), "itworked"
|
|
||||||
)
|
|
|
@ -225,6 +225,7 @@ class CMDModuleTest(ModuleCase):
|
||||||
)
|
)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skip_on_windows
|
||||||
def test_script(self):
|
def test_script(self):
|
||||||
"""
|
"""
|
||||||
cmd.script
|
cmd.script
|
||||||
|
@ -235,6 +236,7 @@ class CMDModuleTest(ModuleCase):
|
||||||
self.assertEqual(ret["stdout"], args)
|
self.assertEqual(ret["stdout"], args)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skip_on_windows
|
||||||
def test_script_query_string(self):
|
def test_script_query_string(self):
|
||||||
"""
|
"""
|
||||||
cmd.script
|
cmd.script
|
||||||
|
@ -245,6 +247,7 @@ class CMDModuleTest(ModuleCase):
|
||||||
self.assertEqual(ret["stdout"], args)
|
self.assertEqual(ret["stdout"], args)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skip_on_windows
|
||||||
def test_script_retcode(self):
|
def test_script_retcode(self):
|
||||||
"""
|
"""
|
||||||
cmd.script_retcode
|
cmd.script_retcode
|
||||||
|
@ -254,6 +257,7 @@ class CMDModuleTest(ModuleCase):
|
||||||
self.assertEqual(ret, 0)
|
self.assertEqual(ret, 0)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skip_on_windows
|
||||||
def test_script_cwd(self):
|
def test_script_cwd(self):
|
||||||
"""
|
"""
|
||||||
cmd.script with cwd
|
cmd.script with cwd
|
||||||
|
@ -267,6 +271,7 @@ class CMDModuleTest(ModuleCase):
|
||||||
self.assertEqual(ret["stdout"], args)
|
self.assertEqual(ret["stdout"], args)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skip_on_windows
|
||||||
def test_script_cwd_with_space(self):
|
def test_script_cwd_with_space(self):
|
||||||
"""
|
"""
|
||||||
cmd.script with cwd
|
cmd.script with cwd
|
||||||
|
@ -515,7 +520,7 @@ class CMDModuleTest(ModuleCase):
|
||||||
"""
|
"""
|
||||||
with self._ensure_user_exists(self.runas_usr):
|
with self._ensure_user_exists(self.runas_usr):
|
||||||
out = self.run_function(
|
out = self.run_function(
|
||||||
"cmd.run", ["env"], runas=self.runas_usr
|
"cmd.run", ["env"], runas=self.runas_usr, cwd="/tmp"
|
||||||
).splitlines()
|
).splitlines()
|
||||||
self.assertIn(f"USER={self.runas_usr}", out)
|
self.assertIn(f"USER={self.runas_usr}", out)
|
||||||
|
|
||||||
|
|
|
@ -231,12 +231,15 @@ class CPModuleTest(ModuleCase):
|
||||||
"""
|
"""
|
||||||
cp.get_url with https:// source given
|
cp.get_url with https:// source given
|
||||||
"""
|
"""
|
||||||
self.run_function("cp.get_url", ["https://repo.saltproject.io/index.html", tgt])
|
self.run_function(
|
||||||
|
"cp.get_url",
|
||||||
|
["https://packages.broadcom.com/artifactory/saltproject-generic/", tgt],
|
||||||
|
)
|
||||||
with salt.utils.files.fopen(tgt, "r") as instructions:
|
with salt.utils.files.fopen(tgt, "r") as instructions:
|
||||||
data = salt.utils.stringutils.to_unicode(instructions.read())
|
data = salt.utils.stringutils.to_unicode(instructions.read())
|
||||||
self.assertIn("Salt Project", data)
|
self.assertIn("Index of saltproject", data)
|
||||||
self.assertIn("Package", data)
|
self.assertIn("onedir", data)
|
||||||
self.assertIn("Repo", data)
|
self.assertIn("Artifactory Online Server", data)
|
||||||
self.assertNotIn("AYBABTU", data)
|
self.assertNotIn("AYBABTU", data)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@ -245,14 +248,15 @@ class CPModuleTest(ModuleCase):
|
||||||
cp.get_url with https:// source given and destination omitted.
|
cp.get_url with https:// source given and destination omitted.
|
||||||
"""
|
"""
|
||||||
ret = self.run_function(
|
ret = self.run_function(
|
||||||
"cp.get_url", ["https://repo.saltproject.io/index.html"]
|
"cp.get_url",
|
||||||
|
["https://packages.broadcom.com/artifactory/saltproject-generic/"],
|
||||||
)
|
)
|
||||||
|
|
||||||
with salt.utils.files.fopen(ret, "r") as instructions:
|
with salt.utils.files.fopen(ret, "r") as instructions:
|
||||||
data = salt.utils.stringutils.to_unicode(instructions.read())
|
data = salt.utils.stringutils.to_unicode(instructions.read())
|
||||||
self.assertIn("Salt Project", data)
|
self.assertIn("Index of saltproject", data)
|
||||||
self.assertIn("Package", data)
|
self.assertIn("onedir", data)
|
||||||
self.assertIn("Repo", data)
|
self.assertIn("Artifactory Online Server", data)
|
||||||
self.assertNotIn("AYBABTU", data)
|
self.assertNotIn("AYBABTU", data)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@ -266,16 +270,19 @@ class CPModuleTest(ModuleCase):
|
||||||
tgt = None
|
tgt = None
|
||||||
while time.time() - start <= timeout:
|
while time.time() - start <= timeout:
|
||||||
ret = self.run_function(
|
ret = self.run_function(
|
||||||
"cp.get_url", ["https://repo.saltproject.io/index.html", tgt]
|
"cp.get_url",
|
||||||
|
["https://packages.broadcom.com/artifactory/saltproject-generic/", tgt],
|
||||||
)
|
)
|
||||||
if ret.find("HTTP 599") == -1:
|
if ret.find("HTTP 599") == -1:
|
||||||
break
|
break
|
||||||
time.sleep(sleep)
|
time.sleep(sleep)
|
||||||
if ret.find("HTTP 599") != -1:
|
if ret.find("HTTP 599") != -1:
|
||||||
raise Exception("https://repo.saltproject.io/index.html returned 599 error")
|
raise Exception(
|
||||||
self.assertIn("Salt Project", ret)
|
"https://packages.broadcom.com/artifactory/saltproject-generic/ returned 599 error"
|
||||||
self.assertIn("Package", ret)
|
)
|
||||||
self.assertIn("Repo", ret)
|
self.assertIn("Index of saltproject", ret)
|
||||||
|
self.assertIn("onedir", ret)
|
||||||
|
self.assertIn("Artifactory Online Server", ret)
|
||||||
self.assertNotIn("AYBABTU", ret)
|
self.assertNotIn("AYBABTU", ret)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@ -344,11 +351,11 @@ class CPModuleTest(ModuleCase):
|
||||||
"""
|
"""
|
||||||
cp.get_file_str with https:// source given
|
cp.get_file_str with https:// source given
|
||||||
"""
|
"""
|
||||||
src = "https://repo.saltproject.io/index.html"
|
src = "https://packages.broadcom.com/artifactory/saltproject-generic/"
|
||||||
ret = self.run_function("cp.get_file_str", [src])
|
ret = self.run_function("cp.get_file_str", [src])
|
||||||
self.assertIn("Salt Project", ret)
|
self.assertIn("Index of saltproject", ret)
|
||||||
self.assertIn("Package", ret)
|
self.assertIn("onedir", ret)
|
||||||
self.assertIn("Repo", ret)
|
self.assertIn("Artifactory Online Server", ret)
|
||||||
self.assertNotIn("AYBABTU", ret)
|
self.assertNotIn("AYBABTU", ret)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
|
|
@ -7,6 +7,7 @@ import pytest
|
||||||
|
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
import salt.utils.stringutils
|
||||||
from tests.support.case import ModuleCase
|
from tests.support.case import ModuleCase
|
||||||
from tests.support.helpers import requires_system_grains
|
from tests.support.helpers import requires_system_grains
|
||||||
from tests.support.runtests import RUNTIME_VARS
|
from tests.support.runtests import RUNTIME_VARS
|
||||||
|
@ -199,7 +200,7 @@ class FileModuleTest(ModuleCase):
|
||||||
assert ret["retcode"] == 0, repr(ret)
|
assert ret["retcode"] == 0, repr(ret)
|
||||||
with salt.utils.files.fopen(src_file) as fp:
|
with salt.utils.files.fopen(src_file) as fp:
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
salt.utils.stringutils.to_unicode(fp.read()), "Hello world\n"
|
salt.utils.stringutils.to_unicode(fp.read()), f"Hello world{os.linesep}"
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_remove_file(self):
|
def test_remove_file(self):
|
||||||
|
|
|
@ -5,6 +5,7 @@ Integration tests for Ruby Gem module
|
||||||
import pytest
|
import pytest
|
||||||
from tornado.httpclient import HTTPClient
|
from tornado.httpclient import HTTPClient
|
||||||
|
|
||||||
|
import salt.utils.platform
|
||||||
from tests.support.case import ModuleCase
|
from tests.support.case import ModuleCase
|
||||||
|
|
||||||
|
|
||||||
|
@ -31,6 +32,7 @@ class GemModuleTest(ModuleCase):
|
||||||
if check_status() is False:
|
if check_status() is False:
|
||||||
self.skipTest("External resource 'https://rubygems.org' is not available")
|
self.skipTest("External resource 'https://rubygems.org' is not available")
|
||||||
|
|
||||||
|
self.GEM_BIN = "gem.cmd" if salt.utils.platform.is_windows() else "gem"
|
||||||
self.GEM = "tidy"
|
self.GEM = "tidy"
|
||||||
self.GEM_VER = "1.1.2"
|
self.GEM_VER = "1.1.2"
|
||||||
self.OLD_GEM = "brass"
|
self.OLD_GEM = "brass"
|
||||||
|
@ -54,6 +56,11 @@ class GemModuleTest(ModuleCase):
|
||||||
|
|
||||||
self.addCleanup(uninstall_gem)
|
self.addCleanup(uninstall_gem)
|
||||||
|
|
||||||
|
def run_function(self, function, *args, **kwargs):
|
||||||
|
"""Override run_function to use the gem binary"""
|
||||||
|
kwargs["gem_bin"] = self.GEM_BIN
|
||||||
|
return super().run_function(function, *args, **kwargs)
|
||||||
|
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
def test_install_uninstall(self):
|
def test_install_uninstall(self):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -52,6 +52,7 @@ class ShadowModuleTest(ModuleCase):
|
||||||
|
|
||||||
@pytest.mark.destructive_test
|
@pytest.mark.destructive_test
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skip_if_binaries_missing("passwd")
|
||||||
def test_del_password(self):
|
def test_del_password(self):
|
||||||
"""
|
"""
|
||||||
Test shadow.del_password
|
Test shadow.del_password
|
||||||
|
@ -61,8 +62,9 @@ class ShadowModuleTest(ModuleCase):
|
||||||
|
|
||||||
# Correct Functionality
|
# Correct Functionality
|
||||||
self.assertTrue(self.run_function("shadow.del_password", [self._test_user]))
|
self.assertTrue(self.run_function("shadow.del_password", [self._test_user]))
|
||||||
self.assertEqual(
|
self.assertIn(
|
||||||
self.run_function("shadow.info", [self._test_user])["passwd"], ""
|
self.run_function("shadow.info", [self._test_user])["passwd"],
|
||||||
|
["", "!", "!!"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# User does not exist
|
# User does not exist
|
||||||
|
|
|
@ -1,13 +1,27 @@
|
||||||
|
import subprocess
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import salt.utils.platform
|
||||||
from tests.support.case import ModuleCase
|
from tests.support.case import ModuleCase
|
||||||
|
|
||||||
|
|
||||||
|
def _check_systemctl():
|
||||||
|
if not hasattr(_check_systemctl, "memo"):
|
||||||
|
if not salt.utils.platform.is_linux():
|
||||||
|
_check_systemctl.memo = False
|
||||||
|
else:
|
||||||
|
proc = subprocess.run(["localectl"], capture_output=True, check=False)
|
||||||
|
_check_systemctl.memo = b"No such file or directory" in proc.stderr
|
||||||
|
return _check_systemctl.memo
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skip_on_windows(reason="minion is windows")
|
@pytest.mark.skip_on_windows(reason="minion is windows")
|
||||||
@pytest.mark.skip_on_darwin(reason="locale method is not supported on mac")
|
@pytest.mark.skip_on_darwin(reason="locale method is not supported on mac")
|
||||||
@pytest.mark.skip_on_freebsd(
|
@pytest.mark.skip_on_freebsd(
|
||||||
reason="locale method is supported only within login classes or environment variables"
|
reason="locale method is supported only within login classes or environment variables"
|
||||||
)
|
)
|
||||||
|
@pytest.mark.skipif(_check_systemctl(), reason="localectl degraded")
|
||||||
@pytest.mark.requires_salt_modules("locale")
|
@pytest.mark.requires_salt_modules("locale")
|
||||||
@pytest.mark.windows_whitelisted
|
@pytest.mark.windows_whitelisted
|
||||||
class LocaleModuleTest(ModuleCase):
|
class LocaleModuleTest(ModuleCase):
|
||||||
|
@ -50,6 +64,7 @@ class LocaleModuleTest(ModuleCase):
|
||||||
|
|
||||||
@pytest.mark.destructive_test
|
@pytest.mark.destructive_test
|
||||||
@pytest.mark.slow_test
|
@pytest.mark.slow_test
|
||||||
|
@pytest.mark.skipif(_check_systemctl(), reason="systemd degraded")
|
||||||
def test_set_locale(self):
|
def test_set_locale(self):
|
||||||
original_locale = self.run_function("locale.get_locale")
|
original_locale = self.run_function("locale.get_locale")
|
||||||
locale_to_set = self._find_new_locale(original_locale)
|
locale_to_set = self._find_new_locale(original_locale)
|
||||||
|
|
|
@ -4,8 +4,11 @@ Integration tests for timezone module
|
||||||
Linux and Solaris are supported
|
Linux and Solaris are supported
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import salt.utils.platform
|
||||||
from tests.support.case import ModuleCase
|
from tests.support.case import ModuleCase
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -16,6 +19,17 @@ except ImportError:
|
||||||
HAS_TZLOCAL = False
|
HAS_TZLOCAL = False
|
||||||
|
|
||||||
|
|
||||||
|
def _check_systemctl():
|
||||||
|
if not hasattr(_check_systemctl, "memo"):
|
||||||
|
if not salt.utils.platform.is_linux():
|
||||||
|
_check_systemctl.memo = False
|
||||||
|
else:
|
||||||
|
proc = subprocess.run(["timedatectl"], capture_output=True, check=False)
|
||||||
|
_check_systemctl.memo = b"No such file or directory" in proc.stderr
|
||||||
|
return _check_systemctl.memo
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
|
||||||
class TimezoneLinuxModuleTest(ModuleCase):
|
class TimezoneLinuxModuleTest(ModuleCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
"""
|
"""
|
||||||
|
@ -32,6 +46,7 @@ class TimezoneLinuxModuleTest(ModuleCase):
|
||||||
self.assertIn(ret, timescale)
|
self.assertIn(ret, timescale)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(_check_systemctl(), reason="systemctl degraded")
|
||||||
class TimezoneSolarisModuleTest(ModuleCase):
|
class TimezoneSolarisModuleTest(ModuleCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -99,9 +99,7 @@ def test_help_log(salt_cli):
|
||||||
"""
|
"""
|
||||||
ret = salt_cli.run("--help")
|
ret = salt_cli.run("--help")
|
||||||
count = 0
|
count = 0
|
||||||
stdout = ret.stdout.split("\n")
|
# This can be dependent on COLUMNS environment variable
|
||||||
for line in stdout:
|
assert "sensitive data: all, debug, garbage, profile, trace" in " ".join(
|
||||||
if "sensitive data:" in line:
|
ret.stdout.split()
|
||||||
count += 1
|
)
|
||||||
assert line.strip() == "sensitive data: all, debug, garbage, profile, trace"
|
|
||||||
assert count == 2
|
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue