Merge branch 'master' into metadata_azure

This commit is contained in:
Gareth J. Greenaway 2023-02-03 18:37:58 -08:00 committed by GitHub
commit aad8c7bb75
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
43 changed files with 2882 additions and 81 deletions

View file

@ -5,3 +5,6 @@ self-hosted-runner:
- x86_64
- arm64
- aarch64
- repo-nightly
- repo-staging
- repo-release

View file

@ -37,7 +37,7 @@ runs:
uses: actions/cache@v3
with:
path: artifacts/${{ inputs.package-name }}
key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt') }}
key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt', '.github/actions/build-onedir-deps/action.yml') }}
- name: Create Onedir Directory
shell: bash
@ -46,6 +46,17 @@ runs:
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
python3 -m relenv create --arch=${{ inputs.arch }} artifacts/${{ inputs.package-name }}
- name: Get Onedir Python Version
shell: bash
if: steps.onedir-pkg-cache.outputs.cache-hit != 'true'
run: |
if [ "${{ inputs.platform }}" != "windows" ]; then
PY_VERSION=$(artifacts/${{ inputs.package-name }}/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info))")
else
PY_VERSION=$(artifacts/${{ inputs.package-name }}/Scripts/python -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info))")
fi
echo "PY_VERSION=$PY_VERSION" >> "$GITHUB_ENV"
- name: Upgrade Setuptools & Pip
shell: bash
if: steps.onedir-pkg-cache.outputs.cache-hit != 'true'
@ -60,11 +71,12 @@ runs:
- name: Install Salt Onedir Package Dependencies
shell: bash
if: steps.onedir-pkg-cache.outputs.cache-hit != 'true'
run: |
if [ "${{ inputs.platform }}" != "windows" ]; then
artifacts/${{ inputs.package-name }}/bin/python3 -m pip install -r requirements/static/pkg/py3.10/${{ inputs.platform }}.txt
artifacts/${{ inputs.package-name }}/bin/python3 -m pip install -r requirements/static/pkg/py${PY_VERSION}/${{ inputs.platform }}.txt --no-binary=':all:'
else
artifacts/${{ inputs.package-name }}/Scripts/python -m pip install -r requirements/static/pkg/py3.10/${{ inputs.platform }}.txt
artifacts/${{ inputs.package-name }}/Scripts/python -m pip install -r requirements/static/pkg/py${PY_VERSION}/${{ inputs.platform }}.txt
fi
- name: Cleanup Salt Onedir Directory

View file

@ -19,6 +19,10 @@ inputs:
required: true
type: string
description: Seed used to invalidate caches
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
env:
@ -37,12 +41,12 @@ runs:
uses: actions/cache@v3
with:
path: artifacts/${{ inputs.package-name }}
key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt') }}
key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt', '.github/actions/build-onedir-deps/action.yml') }}
- name: Download Source Tarball
uses: actions/download-artifact@v3
with:
name: salt-${{ env.SALT_VERSION }}.tar.gz
name: salt-${{ inputs.salt-version }}.tar.gz
- name: Install Salt Into Onedir
if: ${{ inputs.platform != 'windows' }}
@ -51,7 +55,7 @@ runs:
RELENV_PIP_DIR: "1"
shell: bash
run: |
artifacts/${{ inputs.package-name }}/bin/python3 -m pip install salt-${{ env.SALT_VERSION }}.tar.gz
artifacts/${{ inputs.package-name }}/bin/python3 -m pip install salt-${{ inputs.salt-version }}.tar.gz
if [ ${{ inputs.platform }} == "darwin" ]; then
pkg/macos/prep_salt.sh --build-dir ./artifacts/${{ inputs.package-name }}
rm -rf ./artifacts/${{ inputs.package-name }}/opt
@ -66,7 +70,7 @@ runs:
shell: powershell
run: |
# install salt
pkg\windows\install_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD -SourceTarball salt-${{ env.SALT_VERSION }}.tar.gz
pkg\windows\install_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD -SourceTarball salt-${{ inputs.salt-version }}.tar.gz
# prep salt
pkg\windows\prep_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD
@ -79,25 +83,25 @@ runs:
shell: bash
run: |
cd artifacts/
tar -cJf ${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz ${{ inputs.package-name }}
tar -cJf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz ${{ inputs.package-name }}
- name: Create Archive (Zipfile)
if: ${{ inputs.platform == 'windows' }}
shell: powershell
run: |
cd artifacts
py -3 -m zipfile -c "${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip" ${{ inputs.package-name }}
py -3 -m zipfile -c "${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip" ${{ inputs.package-name }}
- name: Create Hash Files
shell: bash
run: |
tools pkg generate-hashes artifacts/${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.*
tools pkg generate-hashes artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.*
- name: Upload Onedir Tarball as an Artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz*
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz*
retention-days: 7
if-no-files-found: error
@ -105,7 +109,7 @@ runs:
if: ${{ inputs.platform == 'windows' }}
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip
path: artifacts/${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip*
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip
path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip*
retention-days: 7
if-no-files-found: error

View file

@ -24,7 +24,7 @@ jobs:
- aarch64
container:
image: ghcr.io/saltstack/salt-ci-containers/debian:11
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-11
steps:
- uses: actions/checkout@v3
@ -72,7 +72,7 @@ jobs:
- name: Upload DEBs
uses: actions/upload-artifact@v3
with:
name: salt-${{ matrix.arch }}-debs
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-pkgs
path: ${{ github.workspace }}/pkgs/*
retention-days: 7
if-no-files-found: error

149
.github/workflows/build-deb-repo.yml vendored Normal file
View file

@ -0,0 +1,149 @@
name: Build DEB Apt Repository
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
environment:
type: string
description: On which GitHub Environment Context To Run
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 160
jobs:
build-repo:
name: DEB
environment: ${{ inputs.environment }}
runs-on:
- self-hosted
- linux
- repo-${{ inputs.environment }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
include:
- distro: debian
version: "10"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: debian
version: "10"
arch: aarch64
key-id: 0E08A149DE57BFBE
- distro: debian
version: "11"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: debian
version: "11"
arch: aarch64
key-id: 0E08A149DE57BFBE
- distro: ubuntu
version: "18.04"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: ubuntu
version: "20.04"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: ubuntu
version: "20.04"
arch: aarch64
key-id: 0E08A149DE57BFBE
- distro: ubuntu
version: "22.04"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: ubuntu
version: "22.04"
arch: aarch64
key-id: 0E08A149DE57BFBE
steps:
- uses: actions/checkout@v3
- name: Download System Dependencies
run: |
sudo apt update
sudo apt install -y devscripts apt-utils
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download DEB Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-pkgs
path: artifacts/pkgs/incoming
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text | jq .default_key -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text| jq .default_passphrase -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Create Repository Path
run: |
mkdir -p artifacts/pkgs/repo
- name: Download `salt-archive-keyring.gpg`
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/salt-archive-keyring-gpg-file \
--query SecretString --output text| jq .base64 -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o ~/salt-archive-keyring.gpg -d -
rm "${SECRETS_KEY_FILE}"
- name: Create Repository
run: |
tools pkg-repo deb --key-id=${{ matrix.key-id }} --distro-arch=${{ matrix.arch }} \
${{ inputs.nightly-build && '--nightly-build' || '' }} --salt-version=${{ inputs.salt-version }} \
${{ inputs.rc-build && '--rc-build' || '' }} --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
path: artifacts/pkgs/repo/*
retention-days: 7
if-no-files-found: error

View file

@ -58,7 +58,7 @@ jobs:
- name: Upload ${{ matrix.arch }} Package
uses: actions/upload-artifact@v3
with:
name: macOS ${{ matrix.arch }} Package
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos-pkgs
path: pkg/macos/*unsigned.pkg
retention-days: 7
if-no-files-found: error

108
.github/workflows/build-macos-repo.yml vendored Normal file
View file

@ -0,0 +1,108 @@
name: Build macOS Repository
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
environment:
type: string
description: On which GitHub Environment Context To Run
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 160
jobs:
build-repo:
name: macOS
environment: ${{ inputs.environment }}
runs-on:
- self-hosted
- linux
- repo-${{ inputs.environment }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
key-id:
- "0E08A149DE57BFBE"
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download macOS x86_64 Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-x86_64-macos-pkgs
path: artifacts/pkgs/incoming
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text | jq .default_key -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text| jq .default_passphrase -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Create Repository Path
run: |
mkdir -p artifacts/pkgs/repo
- name: Download `salt-archive-keyring.gpg`
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/salt-archive-keyring-gpg-file \
--query SecretString --output text| jq .base64 -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o ~/salt-archive-keyring.gpg -d -
rm "${SECRETS_KEY_FILE}"
- name: Create Repository
run: |
tools pkg-repo macos --key-id=${{ matrix.key-id }} \
${{ inputs.nightly-build && '--nightly-build' || '' }} --salt-version=${{ inputs.salt-version }} \
${{ inputs.rc-build && '--rc-build' || '' }} --incoming=artifacts/pkgs/incoming \
--repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
path: artifacts/pkgs/repo/*
retention-days: 7
if-no-files-found: error

150
.github/workflows/build-onedir-repo.yml vendored Normal file
View file

@ -0,0 +1,150 @@
name: Build Onedir Repository
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
environment:
type: string
description: On which GitHub Environment Context To Run
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 160
jobs:
build-repo:
name: Onedir
environment: ${{ inputs.environment }}
runs-on:
- self-hosted
- linux
- repo-${{ inputs.environment }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
key-id:
- "0E08A149DE57BFBE"
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download Windows Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-x86-windows-pkgs
path: artifacts/pkgs/incoming
- name: Download Linux x86_64 Onedir Archive
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-linux-x86_64.tar.xz
path: artifacts/pkgs/incoming
- name: Download Linux aarch64 Onedir Archive
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-linux-aarch64.tar.xz
path: artifacts/pkgs/incoming
- name: Download macOS x86_64 Onedir Archive
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-darwin-x86_64.tar.xz
path: artifacts/pkgs/incoming
- name: Download Windows amd64 Onedir Archive
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-windows-amd64.tar.xz
path: artifacts/pkgs/incoming
- name: Download Windows amd64 Onedir Archive (zip)
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-windows-amd64.zip
path: artifacts/pkgs/incoming
- name: Download Windows x86 Onedir Archive
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-windows-x86.tar.xz
path: artifacts/pkgs/incoming
- name: Download Windows x86 Onedir Archive (zip)
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-onedir-windows-x86.zip
path: artifacts/pkgs/incoming
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text | jq .default_key -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text| jq .default_passphrase -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Create Repository Path
run: |
mkdir -p artifacts/pkgs/repo
- name: Download `salt-archive-keyring.gpg`
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/salt-archive-keyring-gpg-file \
--query SecretString --output text| jq .base64 -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o ~/salt-archive-keyring.gpg -d -
rm "${SECRETS_KEY_FILE}"
- name: Create Repository
run: |
tools pkg-repo onedir --key-id=${{ matrix.key-id }} \
${{ inputs.nightly-build && '--nightly-build' || '' }} --salt-version=${{ inputs.salt-version }} \
${{ inputs.rc-build && '--rc-build' || '' }} --incoming=artifacts/pkgs/incoming \
--repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
path: artifacts/pkgs/repo/*
retention-days: 7
if-no-files-found: error

80
.github/workflows/build-repos.yml vendored Normal file
View file

@ -0,0 +1,80 @@
name: Build Salt Packages
on:
workflow_call:
inputs:
environment:
type: string
description: On which GitHub Environment Context To Run
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 160
jobs:
build-deb:
name: Build DEB Repositories
uses: ./.github/workflows/build-deb-repo.yml
with:
environment: ${{ inputs.environment }}
salt-version: "${{ inputs.salt-version }}"
nightly-build: ${{ inputs.nightly-build }}
rc-build: ${{ inputs.rc-build }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
build-rpm:
name: Build RPM Repositories
uses: ./.github/workflows/build-rpm-repo.yml
with:
environment: ${{ inputs.environment }}
salt-version: "${{ inputs.salt-version }}"
nightly-build: ${{ inputs.nightly-build }}
rc-build: ${{ inputs.rc-build }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
build-windows:
name: Build Windows Repositories
uses: ./.github/workflows/build-windows-repo.yml
with:
environment: ${{ inputs.environment }}
salt-version: "${{ inputs.salt-version }}"
nightly-build: ${{ inputs.nightly-build }}
rc-build: ${{ inputs.rc-build }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
build-macos:
name: Build macOS Repositories
uses: ./.github/workflows/build-macos-repo.yml
with:
environment: ${{ inputs.environment }}
salt-version: "${{ inputs.salt-version }}"
nightly-build: ${{ inputs.nightly-build }}
rc-build: ${{ inputs.rc-build }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
build-onedir:
name: Build Onedir Repositories
uses: ./.github/workflows/build-onedir-repo.yml
with:
environment: ${{ inputs.environment }}
salt-version: "${{ inputs.salt-version }}"
nightly-build: ${{ inputs.nightly-build }}
rc-build: ${{ inputs.rc-build }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}

View file

@ -27,7 +27,7 @@ jobs:
- aarch64
container:
image: ghcr.io/saltstack/salt-ci-containers/centos:stream9
image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9
steps:
- uses: actions/checkout@v3
@ -37,8 +37,6 @@ jobs:
with:
name: salt-${{ inputs.salt-version }}.diff
- name: Download System Dependencies
run: |
yum -y update
@ -66,7 +64,7 @@ jobs:
- name: Upload RPMs
uses: actions/upload-artifact@v3
with:
name: salt-${{ matrix.arch }}-rpms
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-pkgs
path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm
retention-days: 7
if-no-files-found: error

160
.github/workflows/build-rpm-repo.yml vendored Normal file
View file

@ -0,0 +1,160 @@
name: Build RPM Repository
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
environment:
type: string
description: On which GitHub Environment Context To Run
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 160
jobs:
build-repo:
name: RPM
environment: ${{ inputs.environment }}
runs-on:
- self-hosted
- linux
- repo-${{ inputs.environment }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
include:
- distro: amazon
version: "2"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: redhat
version: "7"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: redhat
version: "8"
arch: x86_64
key-id: 0E08A149DE57BFBE
- distro: redhat
version: "9"
arch: x86_64
key-id: 37A710479D30D7B6
steps:
- uses: actions/checkout@v3
- name: Download System Dependencies
run: |
sudo apt update
sudo apt install -y rpm
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download RPM Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-pkgs
path: artifacts/pkgs/incoming
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
if: ${{ matrix.key-id == '0E08A149DE57BFBE' }}
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text | jq .default_key -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text| jq .default_passphrase -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Get Secrets
if: ${{ matrix.key-id == '37A710479D30D7B6' }}
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256 \
--query SecretString --output text | jq .default_key -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256 \
--query SecretString --output text| jq .default_passphrase -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Create Repository Path
run: |
mkdir -p artifacts/pkgs/repo
- name: Download `salt-archive-keyring.gpg`
if: ${{ matrix.key-id == '0E08A149DE57BFBE' }}
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/saltstack-gpg-key-file \
--query SecretString --output text| jq .base64 -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o ~/SALTSTACK-GPG-KEY.pub -d -
rm "${SECRETS_KEY_FILE}"
- name: Download `salt-archive-keyring.gpg`
if: ${{ matrix.key-id == '37A710479D30D7B6' }}
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/saltstack-gpg-key2-file \
--query SecretString --output text| jq .base64 -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o ~/SALTSTACK-GPG-KEY2.pub -d -
rm "${SECRETS_KEY_FILE}"
- name: Create Repository
run: |
tools pkg-repo rpm --key-id=${{ matrix.key-id }} --distro-arch=${{ matrix.arch }} \
${{ inputs.nightly-build && '--nightly-build' || '' }} --salt-version=${{ inputs.salt-version }} \
${{ inputs.rc-build && '--rc-build' || '' }} --distro=${{ matrix.distro }} \
--distro-version=${{ matrix.version }} \
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
path: artifacts/pkgs/repo/*
retention-days: 7
if-no-files-found: error

View file

@ -59,6 +59,7 @@ jobs:
platform: linux
arch: ${{ matrix.arch }}
cache-seed: ${{ inputs.cache-seed }}
salt-version: "${{ inputs.salt-version }}"
build-salt-windows:
@ -101,6 +102,7 @@ jobs:
platform: windows
arch: ${{ matrix.arch }}
cache-seed: ${{ inputs.cache-seed }}
salt-version: "${{ inputs.salt-version }}"
build-salt-macos:
@ -141,3 +143,4 @@ jobs:
platform: darwin
arch: ${{ matrix.arch }}
cache-seed: ${{ inputs.cache-seed }}
salt-version: "${{ inputs.salt-version }}"

View file

@ -52,18 +52,12 @@ jobs:
run: |
& pkg/windows/build.cmd -Architecture ${{ matrix.arch }} -Version ${{ inputs.salt-version }} -CICD -SkipInstall
- name: Upload ${{ matrix.arch }} NSIS Package
- name: Upload ${{ matrix.arch }} Packages
uses: actions/upload-artifact@v3
with:
name: Windows ${{ matrix.arch }} NSIS Package
path: pkg/windows/build/Salt-*.exe
retention-days: 7
if-no-files-found: error
- name: Upload ${{ matrix.arch }} MSI Package
uses: actions/upload-artifact@v3
with:
name: Windows ${{ matrix.arch }} MSI Package
path: pkg/windows/build/Salt-*.msi
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-windows-pkgs
path: |
pkg/windows/build/Salt-*.exe
pkg/windows/build/Salt-*.msi
retention-days: 7
if-no-files-found: error

114
.github/workflows/build-windows-repo.yml vendored Normal file
View file

@ -0,0 +1,114 @@
name: Build Windows Repository
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
environment:
type: string
description: On which GitHub Environment Context To Run
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 160
jobs:
build-repo:
name: Windows
environment: ${{ inputs.environment }}
runs-on:
- self-hosted
- linux
- repo-${{ inputs.environment }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
key-id:
- "0E08A149DE57BFBE"
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download Windows x86 Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-x86-windows-pkgs
path: artifacts/pkgs/incoming
- name: Download Windows amd64 Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-amd64-windows-pkgs
path: artifacts/pkgs/incoming
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text | jq .default_key -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys \
--query SecretString --output text| jq .default_passphrase -r \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Create Repository Path
run: |
mkdir -p artifacts/pkgs/repo
- name: Download `salt-archive-keyring.gpg`
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/salt-archive-keyring-gpg-file \
--query SecretString --output text| jq .base64 -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o ~/salt-archive-keyring.gpg -d -
rm "${SECRETS_KEY_FILE}"
- name: Create Repository
run: |
tools pkg-repo windows --key-id=${{ matrix.key-id }} \
${{ inputs.nightly-build && '--nightly-build' || '' }} --salt-version=${{ inputs.salt-version }} \
${{ inputs.rc-build && '--rc-build' || '' }} --incoming=artifacts/pkgs/incoming \
--repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
uses: actions/upload-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
path: artifacts/pkgs/repo/*
retention-days: 7
if-no-files-found: error

View file

@ -8,7 +8,7 @@ on:
env:
COLUMNS: 160
CACHE_SEED: SEED-4 # Bump the number to invalidate all caches
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
permissions:
@ -252,7 +252,6 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
build-salt-onedir:
name: Build Salt Onedir
needs:
@ -266,7 +265,6 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
build-pkgs:
name: Build Salt Packages
needs:

726
.github/workflows/nightly.yml vendored Normal file
View file

@ -0,0 +1,726 @@
# Do not edit these workflows directly as the changes made will be overwritten.
# Instead, edit the template '.github/workflows/templates/nightly.yml.j2'
---
name: Nightly
on:
schedule:
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
- cron: '0 3 * * *' # Every day at 3AM
env:
COLUMNS: 160
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: false
jobs:
prepare-workflow:
name: Prepare Workflow Run
runs-on: ubuntu-latest
outputs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0 # Full clone to also get the tags to get the right salt version
- name: Get Changed Files
if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
id: changed-files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
repo:
- added|modified:
- '**'
doc-requirements:
- added|modified: &doc_requirements
- requirements/static/ci/py3.*/docs.txt
lint-requirements:
- added|modified: &lint_requirements
- requirements/static/ci/py3.*/lint.txt
pkg_requirements:
- added|modified: &pkg_requirements
- requirements/static/pkg/py3.*/darwin.txt
- requirements/static/pkg/py3.*/linux.txt
- requirements/static/pkg/py3.*/freebsd.txt
- requirements/static/pkg/py3.*/windows.txt
test_requirements:
- added|modified: &test_requirements
- requirements/static/ci/py3.*/darwin.txt
- requirements/static/ci/py3.*/linux.txt
- requirements/static/ci/py3.*/freebsd.txt
- requirements/static/ci/py3.*/windows.txt
- requirements/static/ci/py3.*/darwin-crypto.txt
- requirements/static/ci/py3.*/linux-crypto.txt
- requirements/static/ci/py3.*/freebsd-crypto.txt
- requirements/static/ci/py3.*/windows-crypto.txt
deleted:
- deleted:
- '**'
docs:
- added|modified:
- doc/**
- *doc_requirements
salt:
- added|modified: &salt_added_modified
- setup.py
- noxfile.py
- salt/**/*.py
- tasks/**/*.py
- tools/**/*.py
tests:
- added|modified: &tests_added_modified
- tests/**/*.py
lint:
- added|modified:
- .pylintrc
- *lint_requirements
golden_images:
- added|modified:
- cicd/golden-images.json
testrun:
- added|modified:
- *salt_added_modified
- *tests_added_modified
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Pretty Print The GH Actions Event
run:
tools ci print-gh-event
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: ""
- name: Write Changed Files To A Local File
if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
run:
echo '${{ toJSON(steps.changed-files.outputs) }}' > changed-files.json
- name: Check Local Changed Files Contents
if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
run:
cat changed-files.json
- name: Process Changed Files
if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
id: process-changed-files
run:
tools ci process-changed-files ${{ github.event_name }} changed-files.json
- name: Check Collected Changed Files
if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
run:
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
- name: Define Jobs To Run
id: define-jobs
run:
tools ci define-jobs ${{ github.event_name }}
- name: Check Collected Jobs
run:
echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'
- name: Define Testrun
id: define-testrun
run:
tools ci define-testrun ${{ github.event_name }} changed-files.json
- name: Check Defined Test Run
run:
echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'
- name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
run:
cat testrun-changed-files.txt || true
- name: Upload testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
uses: actions/upload-artifact@v3
with:
name: testrun-changed-files.txt
path: testrun-changed-files.txt
- name: Set Cache Seed Output
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
uses: ./.github/workflows/pre-commit-action.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
docs:
name: Build Docs
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
uses: ./.github/workflows/docs-action.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
lint:
name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
uses: ./.github/workflows/lint-action.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
prepare-release:
name: Prepare Release
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/prepare-release.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
build-source-tarball:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
needs:
- prepare-workflow
- prepare-release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
- name: Build Source Tarball
uses: ./.github/actions/build-source-tarball
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
build-deps-onedir:
name: Build Dependencies Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
build-salt-onedir:
name: Build Salt Onedir
needs:
- prepare-workflow
- build-deps-onedir
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
build-pkgs:
name: Build Salt Packages
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/build-packages.yml
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
windows-2016:
name: Windows 2016
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: windows-2016
nox-session: ci-test-onedir
platform: windows
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
windows-2019:
name: Windows 2019
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: windows-2019
nox-session: ci-test-onedir
platform: windows
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
windows-2022:
name: Windows 2022
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: windows-2022
nox-session: ci-test-onedir
platform: windows
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
macos-12:
name: macOS 12
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: macos-12
nox-session: ci-test-onedir
platform: darwin
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
almalinux-8:
name: Alma Linux 8
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: almalinux-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
almalinux-9:
name: Alma Linux 9
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: almalinux-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
amazonlinux-2:
name: Amazon Linux 2
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: amazonlinux-2
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
archlinux-lts:
name: Arch Linux LTS
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: archlinux-lts
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
centos-7:
name: CentOS 7
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
centosstream-8:
name: CentOS Stream 8
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: centosstream-8
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
centosstream-9:
name: CentOS Stream 9
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: centosstream-9
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
debian-10:
name: Debian 10
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: debian-10
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
debian-11:
name: Debian 11
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: debian-11
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
debian-11-arm64:
name: Debian 11 Arm64
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: debian-11-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
fedora-36:
name: Fedora 36
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: fedora-36
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
opensuse-15:
name: Opensuse 15
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: opensuse-15
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
photonos-3:
name: Photon OS 3
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-3
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
photonos-4:
name: Photon OS 4
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: photonos-4
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-1804:
name: Ubuntu 18.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: ubuntu-18.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2004:
name: Ubuntu 20.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: ubuntu-20.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2004-arm64:
name: Ubuntu 20.04 Arm64
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: ubuntu-20.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2204:
name: Ubuntu 22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: ubuntu-22.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2204-arm64:
name: Ubuntu 22.04 Arm64
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: ubuntu-22.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
build-repositories:
name: Build Repositories
needs:
- prepare-workflow
- build-pkgs
uses: ./.github/workflows/build-repos.yml
with:
environment: nightly
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
nightly-build: true
rc-build: ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
publish-repositories:
name: Publish Repositories
needs:
- prepare-workflow
- build-repositories
- windows-2016
- windows-2019
- windows-2022
- macos-12
- almalinux-8
- almalinux-9
- amazonlinux-2
- archlinux-lts
- centos-7
- centosstream-8
- centosstream-9
- debian-10
- debian-11
- debian-11-arm64
- fedora-36
- opensuse-15
- photonos-3
- photonos-4
- ubuntu-1804
- ubuntu-2004
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
uses: ./.github/workflows/publish-repositories.yml
with:
environment: nightly
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
secrets: inherit
set-pipeline-exit-status:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
runs-on: ubuntu-latest
needs:
- prepare-workflow
- pre-commit
- docs
- lint
- build-deps-onedir
- build-salt-onedir
- build-pkgs
- build-repositories
- publish-repositories
steps:
- name: Get workflow information
id: get-workflow-info
uses: technote-space/workflow-conclusion-action@v3
- name: Set Pipeline Exit Status
shell: bash
run: |
if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
exit 1
else
exit 0
fi
- name: Done
if: always()
run:
echo "All workflows finished"

View file

@ -0,0 +1,41 @@
---
# Reusable workflow: downloads the repository artifact produced by the
# build-repositories job and pushes its contents to the environment's S3 bucket.
name: Deploy Nightly

on:
  workflow_call:
    inputs:
      environment:
        type: string
        description: On which GitHub Environment Context To Run
      salt-version:
        type: string
        required: true
        description: The Salt version

env:
  COLUMNS: 160

jobs:
  publish-repositories:
    # Fixed typo: was "Publish Reporitories"
    name: Publish Repositories
    # Runs on a self-hosted runner labelled for the target repo environment
    # (repo-nightly / repo-staging / repo-release).
    runs-on:
      - self-hosted
      - linux
      - repo-${{ inputs.environment }}
    # Bind to the GitHub Environment so environment-scoped secrets/approvals apply.
    environment: ${{ inputs.environment }}
    steps:
      - name: Download Repository Artifact
        uses: actions/download-artifact@v3
        with:
          # Must match the artifact name uploaded by the build-repositories workflow.
          name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
          path: repo/

      - name: Show Repository
        run: |
          tree -a repo/

      - name: Upload Repository Contents (${{ inputs.environment }})
        run: |
          aws s3 cp --acl bucket-owner-full-control --recursive repo/ s3://salt-project-prod-salt-artifacts-${{ inputs.environment }}

View file

@ -10,7 +10,7 @@ on:
env:
COLUMNS: 160
CACHE_SEED: SEED-4 # Bump the number to invalidate all caches
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
permissions:
@ -250,7 +250,6 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
build-salt-onedir:
name: Build Salt Onedir
needs:
@ -264,7 +263,6 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
build-pkgs:
name: Build Salt Packages
needs:

View file

@ -9,6 +9,8 @@ on:
<%- block jobs %>
<{- super() }>
<%- if includes.get('pre-commit', True) %>
pre-commit:
<%- do conclusion_needs.append('pre-commit') %>
name: Pre-Commit
@ -19,6 +21,11 @@ on:
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
<%- endif %>
<%- if includes.get('docs', True) %>
docs:
<%- do conclusion_needs.append('docs') %>
name: Build Docs
@ -29,6 +36,11 @@ on:
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
<%- endif %>
<%- if includes.get('lint', True) %>
lint:
<%- do conclusion_needs.append('lint') %>
name: Lint
@ -39,6 +51,11 @@ on:
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
<%- endif %>
<%- if includes.get('prepare-release', True) %>
prepare-release:
name: Prepare Release
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
@ -49,6 +66,11 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
<%- endif %>
<%- if includes.get('build-source-tarball', True) %>
build-source-tarball:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
@ -78,6 +100,11 @@ on:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
<%- endif %>
<%- if includes.get('build-deps-onedir', True) %>
build-deps-onedir:
<%- do conclusion_needs.append('build-deps-onedir') %>
name: Build Dependencies Onedir
@ -91,6 +118,10 @@ on:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
<%- endif %>
<%- if includes.get('build-salt-onedir', True) %>
build-salt-onedir:
<%- do conclusion_needs.append('build-salt-onedir') %>
@ -106,6 +137,10 @@ on:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
<%- endif %>
<%- if includes.get('build-pkgs', True) %>
build-pkgs:
<%- do conclusion_needs.append('build-pkgs') %>
@ -119,8 +154,13 @@ on:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
<%- endif %>
<%- if includes.get('salt-tests', True) %>
windows-2016:
<%- do conclusion_needs.append('windows-2016') %>
<%- do test_salt_needs.append('windows-2016') %>
name: Windows 2016
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -137,7 +177,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
windows-2019:
<%- do conclusion_needs.append('windows-2019') %>
<%- do test_salt_needs.append('windows-2019') %>
name: Windows 2019
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -154,7 +194,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
windows-2022:
<%- do conclusion_needs.append('windows-2022') %>
<%- do test_salt_needs.append('windows-2022') %>
name: Windows 2022
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -171,7 +211,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
macos-12:
<%- do conclusion_needs.append('macos-12') %>
<%- do test_salt_needs.append('macos-12') %>
name: macOS 12
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
needs:
@ -188,7 +228,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
almalinux-8:
<%- do conclusion_needs.append('almalinux-8') %>
<%- do test_salt_needs.append('almalinux-8') %>
name: Alma Linux 8
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -205,7 +245,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
almalinux-9:
<%- do conclusion_needs.append('almalinux-9') %>
<%- do test_salt_needs.append('almalinux-9') %>
name: Alma Linux 9
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -222,7 +262,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
amazonlinux-2:
<%- do conclusion_needs.append('amazonlinux-2') %>
<%- do test_salt_needs.append('amazonlinux-2') %>
name: Amazon Linux 2
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -239,7 +279,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
archlinux-lts:
<%- do conclusion_needs.append('archlinux-lts') %>
<%- do test_salt_needs.append('archlinux-lts') %>
name: Arch Linux LTS
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -256,7 +296,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
centos-7:
<%- do conclusion_needs.append('centos-7') %>
<%- do test_salt_needs.append('centos-7') %>
name: CentOS 7
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -273,7 +313,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
centosstream-8:
<%- do conclusion_needs.append('centosstream-8') %>
<%- do test_salt_needs.append('centosstream-8') %>
name: CentOS Stream 8
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -290,7 +330,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
centosstream-9:
<%- do conclusion_needs.append('centosstream-9') %>
<%- do test_salt_needs.append('centosstream-9') %>
name: CentOS Stream 9
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -307,7 +347,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
debian-10:
<%- do conclusion_needs.append('debian-10') %>
<%- do test_salt_needs.append('debian-10') %>
name: Debian 10
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -324,7 +364,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
debian-11:
<%- do conclusion_needs.append('debian-11') %>
<%- do test_salt_needs.append('debian-11') %>
name: Debian 11
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -341,7 +381,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
debian-11-arm64:
<%- do conclusion_needs.append('debian-11-arm64') %>
<%- do test_salt_needs.append('debian-11-arm64') %>
name: Debian 11 Arm64
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -358,7 +398,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
fedora-36:
<%- do conclusion_needs.append('fedora-36') %>
<%- do test_salt_needs.append('fedora-36') %>
name: Fedora 36
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -375,7 +415,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
opensuse-15:
<%- do conclusion_needs.append('opensuse-15') %>
<%- do test_salt_needs.append('opensuse-15') %>
name: Opensuse 15
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -392,7 +432,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
photonos-3:
<%- do conclusion_needs.append('photonos-3') %>
<%- do test_salt_needs.append('photonos-3') %>
name: Photon OS 3
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -409,7 +449,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
photonos-4:
<%- do conclusion_needs.append('photonos-4') %>
<%- do test_salt_needs.append('photonos-4') %>
name: Photon OS 4
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -426,7 +466,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-1804:
<%- do conclusion_needs.append('ubuntu-1804') %>
<%- do test_salt_needs.append('ubuntu-1804') %>
name: Ubuntu 18.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -443,7 +483,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2004:
<%- do conclusion_needs.append('ubuntu-2004') %>
<%- do test_salt_needs.append('ubuntu-2004') %>
name: Ubuntu 20.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -460,7 +500,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2004-arm64:
<%- do conclusion_needs.append('ubuntu-2004-arm64') %>
<%- do test_salt_needs.append('ubuntu-2004-arm64') %>
name: Ubuntu 20.04 Arm64
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -477,7 +517,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2204:
<%- do conclusion_needs.append('ubuntu-2204') %>
<%- do test_salt_needs.append('ubuntu-2204') %>
name: Ubuntu 22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -494,7 +534,7 @@ on:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
ubuntu-2204-arm64:
<%- do conclusion_needs.append('ubuntu-2204-arm64') %>
<%- do test_salt_needs.append('ubuntu-2204-arm64') %>
name: Ubuntu 22.04 Arm64
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
needs:
@ -509,4 +549,7 @@ on:
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
<%- endif %>
<%- endblock jobs %>

View file

@ -1,5 +1,6 @@
# Do not edit these workflows directly as the changes made will be overwritten.
# Instead, edit the template '<{ template }>'
<%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %>
---
name: <{ workflow_name }>
@ -15,7 +16,7 @@ on:
env:
COLUMNS: 160
CACHE_SEED: SEED-4 # Bump the number to invalidate all caches
CACHE_SEED: SEED-5 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"
<%- endblock env %>
@ -138,7 +139,7 @@ jobs:
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: ""
salt-version: "<{ prepare_workflow_salt_version_input }>"
- name: Write Changed Files To A Local File
if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
@ -206,7 +207,10 @@ jobs:
if: always()
runs-on: ubuntu-latest
needs:
<%- for need in conclusion_needs %>
<%- for need in conclusion_needs.iter(consume=True) %>
- <{ need }>
<%- endfor %>
<%- for need in test_salt_needs.iter(consume=False) %>
- <{ need }>
<%- endfor %>
steps:

View file

@ -0,0 +1,61 @@
<%- set gh_environment = gh_environment|default("nightly") %>
<%- extends 'ci.yml.j2' %>
<%- block on %>
on:
schedule:
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
- cron: '0 3 * * *' # Every day at 3AM
<%- endblock on %>
<%- block concurrency %>
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: false
<%- endblock concurrency %>
<%- block jobs %>
<{- super() }>
build-repositories:
<%- do conclusion_needs.append('build-repositories') %>
name: Build Repositories
needs:
- prepare-workflow
- build-pkgs
uses: ./.github/workflows/build-repos.yml
with:
environment: <{ gh_environment }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
<%- if gh_environment == "nightly" %>
nightly-build: true
<%- else %>
nightly-build: false
<%- endif %>
rc-build: ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
publish-repositories:
<%- do conclusion_needs.append('publish-repositories') %>
name: Publish Repositories
needs:
- prepare-workflow
- build-repositories
<%- for need in test_salt_needs.iter(consume=True) %>
- <{ need }>
<%- endfor %>
uses: ./.github/workflows/publish-repositories.yml
with:
environment: <{ gh_environment }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
secrets: inherit
<%- endblock jobs %>
<%- block pipeline_exit_status %>
<{ super() }>
<%- endblock pipeline_exit_status %>

View file

@ -19,6 +19,7 @@ repos:
- boto3==1.21.46
- pyyaml==6.0
- jinja2==3.1.2
- packaging==23.0
- id: tools
alias: actionlint
name: Lint GitHub Actions Workflows
@ -32,6 +33,7 @@ repos:
- boto3==1.21.46
- pyyaml==6.0
- jinja2==3.1.2
- packaging==23.0
- repo: https://github.com/saltstack/pip-tools-compile-impersonate
rev: "4.6"

1
changelog/62168.changed Normal file
View file

@ -0,0 +1 @@
Disabled FQDNs grains on macOS by default

3
changelog/63316.added Normal file
View file

@ -0,0 +1,3 @@
Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id.
This allows users to enable state_events on a per use basis rather than having to
enable them globally for all state runs.

1
changelog/63596.fixed Normal file
View file

@ -0,0 +1 @@
LGPO: Added support for "Relax minimum password length limits"

View file

@ -171,7 +171,8 @@
# asynchronously, however, it still adds 5 seconds every time grains are
# generated if an IP does not resolve. In Windows grains are regenerated each
# time a new process is spawned. Therefore, the default for Windows is `False`.
# All other OSes default to `True`
# On macOS, FQDN resolution can be very slow, therefore the default for macOS is
# `False` as well. All other OSes default to `True`
# enable_fqdns_grains: True
# The minion can take a while to start up when lspci and/or dmidecode is used

View file

@ -1109,8 +1109,9 @@ adds 5 seconds every time grains are generated if an IP does not resolve. In Win
grains are regenerated each time a new process is spawned. Therefore, the default for
Windows is ``False``. In many cases this value does not make sense to include for proxy
minions as it will be FQDN for the host running the proxy minion process, so the default
for proxy minions is ``False```. All other OSes default to ``True``. This options was
added `here <https://github.com/saltstack/salt/pull/55581>`_.
for proxy minions is ``False``. On macOS, FQDN resolution can be very slow, therefore
the default for macOS is ``False`` as well. All other OSes default to ``True``.
This option was added `here <https://github.com/saltstack/salt/pull/55581>`_.
.. code-block:: yaml

View file

@ -795,7 +795,6 @@ In this example, `cmd.run` would be run only if either of the `file.managed`
states generated changes and at least one of the watched state's "result" is
``True``.
.. _requisites-fire-event:
Altering States
---------------
@ -1125,6 +1124,8 @@ salt/states/ file.
``mod_run_check_cmd`` is used to check for the check_cmd options. To override
this one, include a ``mod_run_check_cmd`` in the states file for the state.
.. _requisites-fire-event:
Fire Event Notifications
========================

View file

@ -90,8 +90,15 @@ Job events
.. salt:event:: salt/job/<JID>/prog/<MID>/<RUN NUM>
Fired each time a each function in a state run completes execution. Must be
enabled using the :conf_master:`state_events` option.
Fired each time each function in a state run completes execution. Can
also be fired on individual state if the :ref:`fire_event <requisites-fire-event>`
option is set on that state.
Can be enabled for all state runs in the Salt master config with the
:conf_master:`state_events` option. To enable for an individual state
run, pass ``state_events=True`` to the :py:mod:`state <salt.modules.state>`
function being used.
:var data: The data returned from the state module function.
:var id: The minion ID.

View file

@ -24,6 +24,8 @@ jmespath==1.0.1
# botocore
markupsafe==2.1.2
# via jinja2
packaging==23.0
# via -r requirements/static/ci/tools.in
pygments==2.13.0
# via rich
python-dateutil==2.8.2

View file

@ -24,6 +24,8 @@ jmespath==1.0.1
# botocore
markupsafe==2.1.2
# via jinja2
packaging==23.0
# via -r requirements/static/ci/tools.in
pygments==2.13.0
# via rich
python-dateutil==2.8.2

View file

@ -3,3 +3,4 @@ attrs
boto3
pyyaml
jinja2
packaging

View file

@ -65,6 +65,12 @@ elif salt.utils.platform.is_proxy():
_DFLT_FQDNS_GRAINS = False
_MASTER_TRIES = 1
_MASTER_USER = salt.utils.user.get_user()
elif salt.utils.platform.is_darwin():
_DFLT_IPC_MODE = "ipc"
# fqdn resolution can be very slow on macOS, see issue #62168
_DFLT_FQDNS_GRAINS = False
_MASTER_TRIES = 1
_MASTER_USER = salt.utils.user.get_user()
else:
_DFLT_IPC_MODE = "ipc"
_DFLT_FQDNS_GRAINS = False

View file

@ -2806,6 +2806,7 @@ def fqdns():
or salt.utils.platform.is_sunos()
or salt.utils.platform.is_aix()
or salt.utils.platform.is_junos()
or salt.utils.platform.is_darwin()
else True,
):
opt = __salt__["network.fqdns"]()

View file

@ -705,6 +705,12 @@ def apply_(mods=None, **kwargs):
salt '*' state.apply localconfig=/path/to/minion.yml
state_events
The state_events option sends progress events as each function in
a state run completes execution.
.. versionadded:: 3006.0
.. rubric:: APPLYING INDIVIDUAL SLS FILES (A.K.A. :py:func:`STATE.SLS <salt.modules.state.sls>`)
@ -816,6 +822,12 @@ def apply_(mods=None, **kwargs):
module types.
.. versionadded:: 2017.7.8,2018.3.3,2019.2.0
state_events
The state_events option sends progress events as each function in
a state run completes execution.
.. versionadded:: 3006.0
"""
if mods:
return sls(mods, **kwargs)
@ -974,7 +986,7 @@ def run_request(name="default", **kwargs):
return {}
def highstate(test=None, queue=None, **kwargs):
def highstate(test=None, queue=None, state_events=None, **kwargs):
"""
Retrieve the state data from the salt master for this minion and execute it
@ -1072,6 +1084,12 @@ def highstate(test=None, queue=None, **kwargs):
.. versionadded:: 2015.8.4
state_events
The state_events option sends progress events as each function in
a state run completes execution.
.. versionadded:: 3006.0
CLI Examples:
.. code-block:: bash
@ -1128,6 +1146,9 @@ def highstate(test=None, queue=None, **kwargs):
"is specified."
)
if state_events is not None:
opts["state_events"] = state_events
try:
st_ = salt.state.HighState(
opts,
@ -1186,7 +1207,15 @@ def highstate(test=None, queue=None, **kwargs):
return ret
def sls(mods, test=None, exclude=None, queue=None, sync_mods=None, **kwargs):
def sls(
mods,
test=None,
exclude=None,
queue=None,
sync_mods=None,
state_events=None,
**kwargs
):
"""
Execute the states in one or more SLS files
@ -1296,6 +1325,12 @@ def sls(mods, test=None, exclude=None, queue=None, sync_mods=None, **kwargs):
.. versionadded:: 2017.7.8,2018.3.3,2019.2.0
state_events
The state_events option sends progress events as each function in
a state run completes execution.
.. versionadded:: 3006.0
CLI Example:
.. code-block:: bash
@ -1382,6 +1417,9 @@ def sls(mods, test=None, exclude=None, queue=None, sync_mods=None, **kwargs):
except KeyError:
log.warning("Invalid custom module type '%s', ignoring", module_type)
if state_events is not None:
opts["state_events"] = state_events
try:
st_ = salt.state.HighState(
opts,
@ -1765,7 +1803,7 @@ def show_states(queue=None, **kwargs):
return list(states.keys())
def sls_id(id_, mods, test=None, queue=None, **kwargs):
def sls_id(id_, mods, test=None, queue=None, state_events=None, **kwargs):
"""
Call a single ID from the named module(s) and handle all requisites
@ -1835,6 +1873,9 @@ def sls_id(id_, mods, test=None, queue=None, **kwargs):
"is specified."
)
if state_events is not None:
opts["state_events"] = state_events
try:
st_ = salt.state.HighState(
opts,

View file

@ -1826,6 +1826,18 @@ class _policy_info:
},
"Transform": self.enabled_one_disabled_zero_no_not_defined_transform,
},
"RelaxMinimumPasswordLengthLimits": {
"Policy": "Relax minimum password length limits",
"lgpo_section": self.password_policy_gpedit_path,
"Settings": self.enabled_one_disabled_zero.keys(),
"Registry": {
"Hive": "HKEY_LOCAL_MACHINE",
"Path": "SYSTEM\\CurrentControlSet\\Control\\SAM",
"Value": "RelaxMinimumPasswordLengthLimits",
"Type": "REG_DWORD",
},
"Transform": self.enabled_one_disabled_zero_transform,
},
"AdminAccountStatus": {
"Policy": "Accounts: Administrator account status",
"Settings": self.enabled_one_disabled_zero_no_not_defined.keys(),

View file

@ -3742,7 +3742,9 @@ class BaseHighState:
)
opts["env_order"] = mopts.get("env_order", opts.get("env_order", []))
opts["default_top"] = mopts.get("default_top", opts.get("default_top"))
opts["state_events"] = mopts.get("state_events")
opts["state_events"] = (
opts.get("state_events") or mopts.get("state_events") or False
)
opts["state_aggregate"] = (
opts.get("state_aggregate") or mopts.get("state_aggregate") or False
)

View file

@ -107,6 +107,36 @@ def lgpo(modules):
"REG_SZ",
True,
),
(
"RelaxMinimumPasswordLengthLimits",
"Enabled",
"HKLM",
"SYSTEM\\CurrentControlSet\\Control\\SAM",
"RelaxMinimumPasswordLengthLimits",
1,
"REG_DWORD",
False,
),
(
"RelaxMinimumPasswordLengthLimits",
"Disabled",
"HKLM",
"SYSTEM\\CurrentControlSet\\Control\\SAM",
"RelaxMinimumPasswordLengthLimits",
0,
"REG_DWORD",
False,
),
(
"RelaxMinimumPasswordLengthLimits",
"Not Defined",
"HKLM",
"SYSTEM\\CurrentControlSet\\Control\\SAM",
"RelaxMinimumPasswordLengthLimits",
"0",
"REG_DWORD",
True,
),
],
)
def test_reg_policy(

View file

@ -0,0 +1,115 @@
"""
tests.pytests.integration.modules.state.test_state_state_events
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
import time
import pytest
log = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def configure_state_tree(salt_master, salt_minion):
top_file = """
base:
'{}':
- state-event
""".format(
salt_minion.id
)
state_event_sls = """
show_notification:
test.show_notification:
- text: Notification
"""
with salt_master.state_tree.base.temp_file(
"top.sls", top_file
), salt_master.state_tree.base.temp_file("state-event.sls", state_event_sls):
yield
@pytest.fixture(scope="module")
def state_event_tag():
"""
State event tag to match
"""
return "salt/job/*/prog/{}/0"
def test_highstate_state_events(
event_listener,
salt_master,
salt_minion,
salt_call_cli,
configure_state_tree,
state_event_tag,
):
"""
Test state.highstate with state_events=True
"""
start_time = time.time()
ret = salt_call_cli.run("state.highstate", state_events=True)
assert ret.returncode == 0
assert ret.data
event_pattern = (salt_master.id, state_event_tag.format(salt_minion.id))
matched_events = event_listener.wait_for_events(
[event_pattern], after_time=start_time, timeout=30
)
assert matched_events.found_all_events
def test_sls_state_events(
event_listener,
salt_master,
salt_minion,
salt_call_cli,
configure_state_tree,
state_event_tag,
):
"""
Test state.sls with state_events=True
"""
start_time = time.time()
ret = salt_call_cli.run("state.sls", "state-event", state_events=True)
assert ret.returncode == 0
assert ret.data
event_pattern = (salt_master.id, state_event_tag.format(salt_minion.id))
matched_events = event_listener.wait_for_events(
[event_pattern], after_time=start_time, timeout=30
)
assert matched_events.found_all_events
def test_sls_id_state_events(
event_listener,
salt_master,
salt_minion,
salt_call_cli,
configure_state_tree,
state_event_tag,
):
"""
Test state.sls_id with state_events=True
"""
start_time = time.time()
ret = salt_call_cli.run(
"state.sls_id", "show_notification", "state-event", state_events=True
)
assert ret.returncode == 0
assert ret.data
event_pattern = (salt_master.id, state_event_tag.format(salt_minion.id))
matched_events = event_listener.wait_for_events(
[event_pattern], after_time=start_time, timeout=30
)
assert matched_events.found_all_events

View file

@ -2144,8 +2144,8 @@ def test_enable_fqdns_without_patching():
"salt.grains.core.__salt__",
{"network.fqdns": MagicMock(return_value="my.fake.domain")},
):
# fqdns is disabled by default on Windows
if salt.utils.platform.is_windows():
# fqdns is disabled by default on Windows and macOS
if salt.utils.platform.is_windows() or salt.utils.platform.is_darwin():
assert core.fqdns() == {"fqdns": []}
else:
assert core.fqdns() == "my.fake.domain"

View file

@ -4,6 +4,7 @@ import tools.changelog
import tools.ci
import tools.docs
import tools.pkg
import tools.pkgrepo
import tools.pre_commit
import tools.vm

904
tools/pkgrepo.py Normal file
View file

@ -0,0 +1,904 @@
"""
These commands are used to build the package repository files.
"""
# pylint: disable=resource-leakage,broad-except
from __future__ import annotations
import hashlib
import json
import logging
import pathlib
import shutil
import textwrap
from datetime import datetime
from typing import TYPE_CHECKING
import packaging.version
from ptscripts import Context, command_group
log = logging.getLogger(__name__)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
# Define the command group
pkg = command_group(
name="pkg-repo", help="Packaging Repository Related Commands", description=__doc__
)
@pkg.command(
    name="deb",
    arguments={
        "salt_version": {
            "help": (
                "The salt version for which to build the repository configuration files. "
                "If not passed, it will be discovered by running 'python3 salt/version.py'."
            ),
            "required": True,
        },
        "distro": {
            "help": "The debian based distribution to build the repository for",
            "choices": ("debian", "ubuntu"),
            "required": True,
        },
        "distro_version": {
            "help": "The distro version.",
            "required": True,
        },
        "distro_arch": {
            "help": "The distribution architecture",
            "choices": ("x86_64", "amd64", "aarch64", "arm64"),
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build": {
            "help": "Development repository target",
        },
        "rc_build": {
            "help": "Release Candidate repository target",
        },
    },
)
def debian(
    ctx: Context,
    salt_version: str = None,
    distro: str = None,
    distro_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    distro_arch: str = "amd64",
    nightly_build: bool = False,
    rc_build: bool = False,
):
    """
    Create the debian repository.

    Lays out an apt repository under ``repo_path`` for the given
    distro/version/arch, copies the packages from ``incoming`` into the
    ``pool`` directory, generates the indexes with ``apt-ftparchive`` and
    signs the ``Release`` file (producing ``InRelease`` and ``Release.gpg``)
    with ``key_id``. Requires ``~/salt-archive-keyring.gpg`` to exist and the
    ``apt-ftparchive``, ``debsign``, ``sha256sum`` and ``gpg`` binaries to be
    available.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert distro is not None
        assert distro_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    distro_info = {
        "debian": {
            "10": {
                "label": "deb10ary",
                "codename": "buster",
                "suitename": "oldstable",
            },
            "11": {
                "label": "deb11ary",
                "codename": "bullseye",
                "suitename": "stable",
            },
        },
        "ubuntu": {
            "18.04": {
                "label": "salt_ubuntu1804",
                "codename": "bionic",
            },
            "20.04": {
                "label": "salt_ubuntu2004",
                "codename": "focal",
            },
            "22.04": {
                "label": "salt_ubuntu2204",
                "codename": "jammy",
            },
        },
    }
    display_name = f"{distro.capitalize()} {distro_version}"
    if distro_version not in distro_info[distro]:
        ctx.error(f"Support for {display_name} is missing.")
        ctx.exit(1)

    # apt repositories use the Debian architecture names.
    if distro_arch == "x86_64":
        ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. Adjusting.")
        distro_arch = "amd64"

    if distro_arch == "aarch64":
        ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.")
        distro_arch = "arm64"

    distro_details = distro_info[distro][distro_version]

    ctx.info("Distribution Details:")
    ctx.info(distro_details)
    if TYPE_CHECKING:
        assert isinstance(distro_details["label"], str)
        assert isinstance(distro_details["codename"], str)
        assert isinstance(distro_details["suitename"], str)
    label: str = distro_details["label"]
    codename: str = distro_details["codename"]

    salt_archive_keyring_gpg_file = (
        pathlib.Path("~/salt-archive-keyring.gpg").expanduser().resolve()
    )
    # BUGFIX: a pathlib.Path instance is always truthy, so the original
    # `if not salt_archive_keyring_gpg_file:` check could never fire.
    if not salt_archive_keyring_gpg_file.exists():
        ctx.error(f"The file '{salt_archive_keyring_gpg_file}' does not exist.")
        ctx.exit(1)

    ftp_archive_config_suite = ""
    if distro == "debian":
        # Only Debian proper publishes a Suite name (stable/oldstable).
        suitename: str = distro_details["suitename"]
        ftp_archive_config_suite = (
            f"""\n    APT::FTPArchive::Release::Suite "{suitename}";\n"""
        )
    archive_description = f"SaltProject {display_name} Python 3{'' if nightly_build else ' development'} Salt package repo"
    # NOTE(review): the ' development' suffix is added when nightly_build is
    # False, which looks inverted — confirm the intended wording.
    ftp_archive_config = f"""\
    APT::FTPArchive::Release::Origin "SaltProject";
    APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite}
    APT::FTPArchive::Release::Codename "{codename}";
    APT::FTPArchive::Release::Architectures "{distro_arch}";
    APT::FTPArchive::Release::Components "main";
    APT::FTPArchive::Release::Description "{archive_description}";
    APT::FTPArchive::Release::Acquire-By-Hash "yes";
    Dir {{
        ArchiveDir ".";
    }};
    BinDirectory "pool" {{
        Packages "dists/{codename}/main/binary-{distro_arch}/Packages";
        Sources "dists/{codename}/main/source/Sources";
        Contents "dists/{codename}/main/Contents-{distro_arch}";
    }}
    """
    ctx.info("Creating repository directory structure ...")
    # BUGFIX: start from repo_path so create_repo_path is always bound.
    # Previously it was only assigned inside the nightly/rc branch, raising
    # NameError for regular release builds.
    create_repo_path = repo_path
    if nightly_build or rc_build:
        create_repo_path = create_repo_path / "salt"
    create_repo_path = create_repo_path / "py3" / distro / distro_version / distro_arch
    if nightly_build is False:
        create_repo_path = create_repo_path / "minor" / salt_version
    else:
        # Nightly repositories are laid out per UTC day.
        create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d")
    create_repo_path.mkdir(exist_ok=True, parents=True)

    ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf"
    ctx.info(f"Writing {ftp_archive_config_file} ...")
    ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config))

    ctx.info(f"Copying {salt_archive_keyring_gpg_file} to {create_repo_path} ...")
    shutil.copyfile(
        salt_archive_keyring_gpg_file,
        create_repo_path / salt_archive_keyring_gpg_file.name,
    )

    pool_path = create_repo_path / "pool"
    pool_path.mkdir(exist_ok=True)
    for fpath in incoming.iterdir():
        dpath = pool_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        if fpath.suffix == ".dsc":
            # Source packages need their .dsc control file (re-)signed.
            ctx.info(f"Running 'debsign' on {dpath} ...")
            ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True)

    dists_path = create_repo_path / "dists"
    symlink_parent_path = dists_path / codename / "main"
    symlink_paths = (
        symlink_parent_path / "by-hash" / "SHA256",
        symlink_parent_path / "source" / "by-hash" / "SHA256",
        symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256",
    )

    for path in symlink_paths:
        path.mkdir(exist_ok=True, parents=True)

    cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"]
    ctx.info(f"Running '{' '.join(cmdline)}' ...")
    ctx.run(*cmdline, cwd=create_repo_path)

    # Acquire-By-Hash support: each index file is also reachable through a
    # symlink named after its SHA256 checksum.
    ctx.info("Creating by-hash symlinks ...")
    for path in symlink_paths:
        for fpath in path.parent.parent.iterdir():
            if not fpath.is_file():
                continue
            sha256sum = ctx.run("sha256sum", str(fpath), capture=True)
            link = path / sha256sum.stdout.decode().split()[0]
            link.symlink_to(f"../../{fpath.name}")

    cmdline = [
        "apt-ftparchive",
        "--no-md5",
        "--no-sha1",
        "--no-sha512",
        "release",
        "-c",
        "apt-ftparchive.conf",
        f"dists/{codename}/",
    ]
    ctx.info(f"Running '{' '.join(cmdline)}' ...")
    ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path)
    release_file = dists_path / codename / "Release"
    ctx.info(f"Writing {release_file} with the output of the previous command...")
    release_file.write_bytes(ret.stdout)

    # Clearsigned InRelease ...
    cmdline = [
        "gpg",
        "-u",
        key_id,
        "-o",
        f"dists/{codename}/InRelease",
        "-a",
        "-s",
        "--clearsign",
        f"dists/{codename}/Release",
    ]
    ctx.info(f"Running '{' '.join(cmdline)}' ...")
    ctx.run(*cmdline, cwd=create_repo_path)

    # ... and a detached Release.gpg signature.
    cmdline = [
        "gpg",
        "-u",
        key_id,
        "-o",
        f"dists/{codename}/Release.gpg",
        "-a",
        "-b",
        "-s",
        f"dists/{codename}/Release",
    ]
    ctx.info(f"Running '{' '.join(cmdline)}' ...")
    ctx.run(*cmdline, cwd=create_repo_path)
    if nightly_build is False:
        ctx.info("Creating '<major-version>' and 'latest' symlinks ...")
        major_version = packaging.version.parse(salt_version).major
        major_link = create_repo_path.parent.parent / str(major_version)
        major_link.symlink_to(f"minor/{salt_version}")
        latest_link = create_repo_path.parent.parent / "latest"
        latest_link.symlink_to(f"minor/{salt_version}")
    else:
        ctx.info("Creating 'latest' symlink ...")
        latest_link = create_repo_path.parent / "latest"
        latest_link.symlink_to(create_repo_path.name)

    ctx.info("Done")
@pkg.command(
    name="rpm",
    arguments={
        "salt_version": {
            "help": (
                "The salt version for which to build the repository configuration files. "
                "If not passed, it will be discovered by running 'python3 salt/version.py'."
            ),
            "required": True,
        },
        "distro": {
            # BUGFIX: help text previously said "debian based" (copy/paste
            # from the deb command) although the choices are RPM distros.
            "help": "The RPM based distribution to build the repository for",
            "choices": ("amazon", "redhat"),
            "required": True,
        },
        "distro_version": {
            "help": "The distro version.",
            "required": True,
        },
        "distro_arch": {
            "help": "The distribution architecture",
            "choices": ("x86_64", "aarch64", "arm64"),
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build": {
            "help": "Development repository target",
        },
        "rc_build": {
            "help": "Release Candidate repository target",
        },
    },
)
def rpm(
    ctx: Context,
    salt_version: str = None,
    distro: str = None,
    distro_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    distro_arch: str = "amd64",
    nightly_build: bool = False,
    rc_build: bool = False,
):
    """
    Create the redhat repository.

    Copies the RPMs from ``incoming`` under ``repo_path`` (sources into
    ``SRPMS``), signs each package with ``rpmsign`` using ``key_id``, builds
    the repository metadata with ``createrepo`` (falling back to a docker
    container when the binary is not installed) and writes the matching
    yum/dnf ``.repo`` definition files and version symlinks.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert distro is not None
        assert distro_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    distro_info = {
        "amazon": ["2"],
        "redhat": ["7", "8", "9"],
    }
    display_name = f"{distro.capitalize()} {distro_version}"
    if distro_version not in distro_info[distro]:
        ctx.error(f"Support for {display_name} is missing.")
        ctx.exit(1)

    if distro_arch == "aarch64":
        ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.")
        distro_arch = "arm64"
    # NOTE(review): the default distro_arch is "amd64", which is not one of
    # the declared choices ("x86_64", "aarch64", "arm64") — confirm whether
    # the default should be "x86_64".

    # Packages signed with the new key (ID ending DE57BFBE uses the old key).
    if key_id == "0E08A149DE57BFBE":
        saltstack_gpg_key_file = (
            pathlib.Path("~/SALTSTACK-GPG-KEY.pub").expanduser().resolve()
        )
    else:
        saltstack_gpg_key_file = (
            pathlib.Path("~/SALTSTACK-GPG-KEY2.pub").expanduser().resolve()
        )
    if not saltstack_gpg_key_file.exists():
        ctx.error(f"The file '{saltstack_gpg_key_file}' does not exist.")
        ctx.exit(1)

    ctx.info("Creating repository directory structure ...")
    # BUGFIX: start from repo_path so create_repo_path is always bound.
    # Previously it was only assigned inside the nightly/rc branch, raising
    # NameError for regular release builds.
    create_repo_path = repo_path
    if nightly_build or rc_build:
        create_repo_path = create_repo_path / "salt"
    create_repo_path = create_repo_path / "py3" / distro / distro_version / distro_arch
    if nightly_build is False:
        create_repo_path = create_repo_path / "minor" / salt_version
    else:
        # Nightly repositories are laid out per UTC day.
        create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d")
    create_repo_path.joinpath("SRPMS").mkdir(exist_ok=True, parents=True)

    ctx.info(f"Copying {saltstack_gpg_key_file} to {create_repo_path} ...")
    shutil.copyfile(
        saltstack_gpg_key_file,
        create_repo_path / saltstack_gpg_key_file.name,
    )

    for fpath in incoming.iterdir():
        if ".src" in fpath.suffixes:
            # Source RPMs live in the SRPMS subdirectory.
            dpath = create_repo_path / "SRPMS" / fpath.name
        else:
            dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        if fpath.suffix == ".rpm":
            ctx.info(f"Running 'rpmsign' on {dpath} ...")
            ctx.run("rpmsign", "--key-id", key_id, "--addsign", str(dpath))

    createrepo = shutil.which("createrepo")
    if createrepo is None:
        # Fall back to running createrepo inside a container when the host
        # does not provide the binary; run as the current uid/gid so the
        # generated metadata is owned by the invoking user.
        container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9"
        ctx.info(f"Using docker container '{container}' to call 'createrepo'...")
        uid = ctx.run("id", "-u", capture=True).stdout.strip().decode()
        gid = ctx.run("id", "-g", capture=True).stdout.strip().decode()
        ctx.run(
            "docker",
            "run",
            "--rm",
            "-v",
            f"{create_repo_path.resolve()}:/code",
            "-u",
            f"{uid}:{gid}",
            "-w",
            "/code",
            container,
            "createrepo",
            ".",
        )
    else:
        ctx.run("createrepo", ".", cwd=create_repo_path)

    def _create_repo_file(repo_file_path, url_suffix):
        # Render and write a yum/dnf .repo definition file pointing at the
        # repo.saltproject.io URL for this build type.
        ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...")
        if nightly_build:
            base_url = "salt-dev/py3/"
            repo_file_contents = "[salt-nightly-repo]"
        elif rc_build:
            base_url = "salt_rc/py3/"
            repo_file_contents = "[salt-rc-repo]"
        else:
            base_url = "py3/"
            repo_file_contents = "[salt-repo]"
        base_url += f"{distro}/{url_suffix}"
        if distro_version == "9":
            gpg_key = f"{base_url}/SALTSTACK-GPG-KEY2.pub"
        else:
            gpg_key = f"{base_url}/SALTSTACK-GPG-KEY.pub"
        if distro == "amazon":
            distro_name = "Amazon Linux"
        else:
            distro_name = "RHEL/CentOS"

        if int(distro_version) < 8:
            # 'failovermethod' is only understood by yum < 4 (EL7 and older).
            failovermethod = "\nfailovermethod=priority\n"
        else:
            failovermethod = ""

        repo_file_contents += f"""
name=Salt repo for {distro_name} {distro_version} PY3
baseurl=https://repo.saltproject.io/{base_url}
skip_if_unavailable=True{failovermethod}
priority=10
enabled=1
enabled_metadata=1
gpgcheck=1
gpgkey={gpg_key}
"""
        # BUGFIX: actually write the rendered contents out; the visible
        # original built the string but never persisted it, and its first
        # parameter confusingly shadowed 'create_repo_path' while unused.
        repo_file_path.write_text(repo_file_contents)

    if nightly_build:
        repo_file_path = create_repo_path.parent / "nightly.repo"
    elif rc_build:
        repo_file_path = create_repo_path.parent / "rc.repo"
    else:
        repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo"

    _create_repo_file(repo_file_path, salt_version)

    if nightly_build is False and rc_build is False:
        ctx.info("Creating '<major-version>' and 'latest' symlinks ...")
        major_version = packaging.version.parse(salt_version).major
        major_link = create_repo_path.parent.parent / str(major_version)
        major_link.symlink_to(f"minor/{salt_version}")
        latest_link = create_repo_path.parent.parent / "latest"
        latest_link.symlink_to(f"minor/{salt_version}")
        for name in (major_version, "latest"):
            repo_file_path = create_repo_path.parent.parent / f"{name}.repo"
            _create_repo_file(repo_file_path, name)
    else:
        ctx.info("Creating 'latest' symlink and 'latest.repo' file ...")
        latest_link = create_repo_path.parent / "latest"
        latest_link.symlink_to(create_repo_path.name)
        repo_file_path = create_repo_path.parent.parent / "latest.repo"
        _create_repo_file(repo_file_path, "latest")

    ctx.info("Done")
@pkg.command(
    name="windows",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build": {
            "help": "Development repository target",
        },
        "rc_build": {
            "help": "Release Candidate repository target",
        },
    },
)
def windows(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build: bool = False,
    rc_build: bool = False,
):
    """
    Create the windows repository.

    Copies the installer artifacts from ``incoming`` under ``repo_path``,
    records name/version/arch and checksums for each file in ``repo.json``
    (merging with any pre-existing ``repo.json`` downloaded from the
    artifacts S3 bucket), writes per-file checksum lists, GPG-signs the
    repository files with ``key_id`` and creates the 'latest' symlink.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    salt_archive_keyring_gpg_file = (
        pathlib.Path("~/salt-archive-keyring.gpg").expanduser().resolve()
    )
    # BUGFIX: a pathlib.Path instance is always truthy, so the original
    # `if not salt_archive_keyring_gpg_file:` check could never fire.
    if not salt_archive_keyring_gpg_file.exists():
        ctx.error(f"The file '{salt_archive_keyring_gpg_file}' does not exist.")
        ctx.exit(1)

    ctx.info("Creating repository directory structure ...")
    # BUGFIX: start from repo_path so create_repo_path is always bound.
    # Previously it was only assigned inside the nightly/rc branch, raising
    # NameError for regular release builds.
    create_repo_path = repo_path
    if nightly_build or rc_build:
        create_repo_path = create_repo_path / "salt"
    create_repo_path = create_repo_path / "py3" / "windows"
    repo_json_path = create_repo_path / "repo.json"
    if nightly_build:
        # Nightly repositories are laid out per UTC day.
        create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d")
    create_repo_path.mkdir(parents=True, exist_ok=True)

    ctx.info("Downloading any pre-existing 'repo.json' file")
    if nightly_build:
        bucket_name = "salt-project-prod-salt-artifacts-nightly"
    else:
        bucket_name = "salt-project-prod-salt-artifacts-staging"

    bucket_url = (
        f"s3://{bucket_name}/{create_repo_path.relative_to(repo_path)}/repo.json"
    )
    # NOTE(review): for nightly builds the download target (the dated
    # directory) differs from repo_json_path (its parent) — confirm which
    # location repo.json is meant to live in.
    ret = ctx.run("aws", "s3", "cp", bucket_url, create_repo_path, check=False)
    if ret.returncode:
        # No pre-existing repo.json; start from scratch.
        repo_json = {}
    else:
        # BUGFIX: parse the file *contents*; json.loads(str(path)) tried to
        # parse the path string itself as JSON and could never succeed.
        repo_json = json.loads(repo_json_path.read_text())

    if salt_version not in repo_json:
        repo_json[salt_version] = {}

    hashes_base_path = create_repo_path / f"salt-{salt_version}"
    for fpath in incoming.iterdir():
        ctx.info(f"* Processing {fpath} ...")
        dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        if "amd64" in dpath.name.lower():
            arch = "amd64"
        elif "x86" in dpath.name.lower():
            arch = "x86"
        else:
            ctx.error(
                f"Cannot pickup the right architecture from the filename '{dpath.name}'."
            )
            ctx.exit(1)
        repo_json[salt_version][dpath.name] = {
            "name": dpath.name,
            "version": salt_version,
            "os": "windows",
            "arch": arch,
        }
        for hash_name in ("blake2b", "sha512", "sha3_512"):
            ctx.info(f"  * Calculating {hash_name} ...")
            hexdigest = _get_file_checksum(fpath, hash_name)
            repo_json[salt_version][dpath.name][hash_name.upper()] = hexdigest
            with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")

    for fpath in create_repo_path.iterdir():
        # NOTE(review): this skips .msi/.exe and signs everything else —
        # confirm the installers are intentionally left without .asc files.
        if fpath.suffix in (".msi", ".exe"):
            continue
        # BUGFIX: missing f-prefix meant the literal '{fpath...}' was logged.
        ctx.info(f"GPG Signing '{fpath.relative_to(repo_path)}' ...")
        # BUGFIX: a missing comma implicitly concatenated "-o" with the
        # output path into a single argv entry.
        ctx.run("gpg", "-u", key_id, "-o", f"{fpath}.asc", "-a", "-b", "-s", str(fpath))

    ctx.info(f"Copying {salt_archive_keyring_gpg_file} to {create_repo_path} ...")
    shutil.copyfile(
        salt_archive_keyring_gpg_file,
        create_repo_path / salt_archive_keyring_gpg_file.name,
    )

    # The most recently processed version also becomes 'latest'.
    repo_json["latest"] = repo_json[salt_version]
    repo_json_path.write_text(json.dumps(repo_json))

    ctx.info("Creating 'latest' symlink ...")
    latest_link = create_repo_path.parent / "latest"
    latest_link.symlink_to(create_repo_path.name)

    ctx.info("Done")
@pkg.command(
    name="macos",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added "
                "to the repository."
            ),
            "required": True,
        },
        "nightly_build": {
            "help": "Development repository target",
        },
        "rc_build": {
            "help": "Release Candidate repository target",
        },
    },
)
def macos(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build: bool = False,
    rc_build: bool = False,
):
    """
    Create the macOS repository.

    Copies the incoming macOS packages into the repository layout, records
    their checksums in ``repo.json``, GPG-signs every repository file and
    refreshes the ``latest`` symlink.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    salt_archive_keyring_gpg_file = (
        pathlib.Path("~/salt-archive-keyring.gpg").expanduser().resolve()
    )
    # A Path object is always truthy; we must check the filesystem.
    if not salt_archive_keyring_gpg_file.exists():
        ctx.error(f"The file '{salt_archive_keyring_gpg_file}' does not exist.")
        ctx.exit(1)

    ctx.info("Creating repository directory structure ...")
    # Release repositories live at the bucket root; nightly/RC builds are
    # nested under an extra 'salt/' prefix.
    create_repo_path = repo_path
    if nightly_build or rc_build:
        create_repo_path = create_repo_path / "salt"
    create_repo_path = create_repo_path / "py3" / "macos"
    repo_json_path = create_repo_path / "repo.json"
    if nightly_build:
        # Nightly builds are versioned per-day.
        create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d")
    create_repo_path.mkdir(parents=True, exist_ok=True)

    ctx.info("Downloading any pre-existing 'repo.json' file")
    if nightly_build:
        bucket_name = "salt-project-prod-salt-artifacts-nightly"
    else:
        bucket_name = "salt-project-prod-salt-artifacts-staging"
    bucket_url = (
        f"s3://{bucket_name}/{create_repo_path.relative_to(repo_path)}/repo.json"
    )
    ret = ctx.run("aws", "s3", "cp", bucket_url, create_repo_path, check=False)
    if ret.returncode:
        # No pre-existing repo.json (or download failed); start fresh.
        repo_json = {}
    else:
        # Parse the downloaded file's contents, not its path string.
        repo_json = json.loads(repo_json_path.read_text())

    if salt_version not in repo_json:
        repo_json[salt_version] = {}

    hashes_base_path = create_repo_path / f"salt-{salt_version}"
    for fpath in incoming.iterdir():
        ctx.info(f"* Processing {fpath} ...")
        dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        repo_json[salt_version][dpath.name] = {
            "name": dpath.name,
            "version": salt_version,
            "os": "macos",
            "arch": "x86_64",
        }
        for hash_name in ("blake2b", "sha512", "sha3_512"):
            ctx.info(f"   * Calculating {hash_name} ...")
            hexdigest = _get_file_checksum(fpath, hash_name)
            repo_json[salt_version][dpath.name][hash_name.upper()] = hexdigest
            with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")

    for fpath in create_repo_path.iterdir():
        # Don't sign the packages themselves, only the repo metadata files.
        if fpath.suffix in (".pkg",):
            continue
        ctx.info(f"GPG Signing '{fpath.relative_to(repo_path)}' ...")
        # Detached (-b) ASCII-armored (-a) signature written to '<file>.asc'.
        ctx.run("gpg", "-u", key_id, "-o", f"{fpath}.asc", "-a", "-b", "-s", str(fpath))

    ctx.info(f"Copying {salt_archive_keyring_gpg_file} to {create_repo_path} ...")
    shutil.copyfile(
        salt_archive_keyring_gpg_file,
        create_repo_path / salt_archive_keyring_gpg_file.name,
    )

    repo_json["latest"] = repo_json[salt_version]
    repo_json_path.write_text(json.dumps(repo_json))

    ctx.info("Creating 'latest' symlink ...")
    latest_link = create_repo_path.parent / "latest"
    # Re-runs would otherwise fail with FileExistsError.
    if latest_link.is_symlink():
        latest_link.unlink()
    latest_link.symlink_to(create_repo_path.name)

    ctx.info("Done")
@pkg.command(
    name="onedir",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added "
                "to the repository."
            ),
            "required": True,
        },
        "nightly_build": {
            "help": "Development repository target",
        },
        "rc_build": {
            "help": "Release Candidate repository target",
        },
    },
)
def onedir(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build: bool = False,
    rc_build: bool = False,
):
    """
    Create the onedir repository.

    Copies the incoming onedir archives (``.xz``/``.zip``) into the
    repository layout, derives OS and architecture from each filename,
    records checksums in ``repo.json``, GPG-signs the repository metadata
    and refreshes the ``latest`` symlink.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    salt_archive_keyring_gpg_file = (
        pathlib.Path("~/salt-archive-keyring.gpg").expanduser().resolve()
    )
    # A Path object is always truthy; we must check the filesystem.
    if not salt_archive_keyring_gpg_file.exists():
        ctx.error(f"The file '{salt_archive_keyring_gpg_file}' does not exist.")
        ctx.exit(1)

    ctx.info("Creating repository directory structure ...")
    # Release repositories live at the bucket root; nightly/RC builds are
    # nested under an extra 'salt/' prefix.
    create_repo_path = repo_path
    if nightly_build or rc_build:
        create_repo_path = create_repo_path / "salt"
    create_repo_path = create_repo_path / "py3" / "onedir"
    repo_json_path = create_repo_path / "repo.json"
    if nightly_build:
        # Nightly builds are versioned per-day.
        create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d")
    create_repo_path.mkdir(parents=True, exist_ok=True)

    ctx.info("Downloading any pre-existing 'repo.json' file")
    if nightly_build:
        bucket_name = "salt-project-prod-salt-artifacts-nightly"
    else:
        bucket_name = "salt-project-prod-salt-artifacts-staging"
    bucket_url = (
        f"s3://{bucket_name}/{create_repo_path.relative_to(repo_path)}/repo.json"
    )
    ret = ctx.run("aws", "s3", "cp", bucket_url, create_repo_path, check=False)
    if ret.returncode:
        # No pre-existing repo.json (or download failed); start fresh.
        repo_json = {}
    else:
        # Parse the downloaded file's contents, not its path string.
        repo_json = json.loads(repo_json_path.read_text())

    if salt_version not in repo_json:
        repo_json[salt_version] = {}

    hashes_base_path = create_repo_path / f"salt-{salt_version}"
    for fpath in incoming.iterdir():
        if fpath.suffix not in (".xz", ".zip"):
            ctx.info(f"Ignoring {fpath} ...")
            continue
        ctx.info(f"* Processing {fpath} ...")
        dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        # Derive the target OS from the archive filename.
        if "-windows-" in fpath.name:
            distro = "windows"
        elif "-darwin-" in fpath.name:
            distro = "macos"
        elif "-linux-" in fpath.name:
            distro = "linux"
        else:
            ctx.error(
                f"Cannot pickup the right OS from the filename '{fpath.name}'."
            )
            ctx.exit(1)
        # Derive the architecture; 'x86_64' must be checked before 'x86'.
        for arch in ("x86_64", "aarch64", "amd64", "x86"):
            if arch in fpath.name.lower():
                break
        else:
            ctx.error(
                f"Cannot pickup the right architecture from the filename '{fpath.name}'."
            )
            ctx.exit(1)
        repo_json[salt_version][dpath.name] = {
            "name": dpath.name,
            "version": salt_version,
            "os": distro,
            "arch": arch,
        }
        for hash_name in ("blake2b", "sha512", "sha3_512"):
            ctx.info(f"   * Calculating {hash_name} ...")
            hexdigest = _get_file_checksum(fpath, hash_name)
            repo_json[salt_version][dpath.name][hash_name.upper()] = hexdigest
            with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")

    for fpath in create_repo_path.iterdir():
        # Don't sign keyring/package files, only the repo metadata files.
        if fpath.suffix in (".gpg", ".pkg"):
            continue
        ctx.info(f"GPG Signing '{fpath.relative_to(repo_path)}' ...")
        # Detached (-b) ASCII-armored (-a) signature written to '<file>.asc'.
        ctx.run("gpg", "-u", key_id, "-o", f"{fpath}.asc", "-a", "-b", "-s", str(fpath))

    ctx.info(f"Copying {salt_archive_keyring_gpg_file} to {create_repo_path} ...")
    shutil.copyfile(
        salt_archive_keyring_gpg_file,
        create_repo_path / salt_archive_keyring_gpg_file.name,
    )

    repo_json["latest"] = repo_json[salt_version]
    repo_json_path.write_text(json.dumps(repo_json))

    ctx.info("Creating 'latest' symlink ...")
    latest_link = create_repo_path.parent / "latest"
    # Re-runs would otherwise fail with FileExistsError.
    if latest_link.is_symlink():
        latest_link.unlink()
    latest_link.symlink_to(create_repo_path.name)

    ctx.info("Done")
def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str:
with fpath.open("rb") as rfh:
try:
digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined]
except AttributeError:
# Python < 3.11
buf = bytearray(2**18) # Reusable buffer to reduce allocations.
view = memoryview(buf)
digest = getattr(hashlib, hash_name)()
while True:
size = rfh.readinto(buf)
if size == 0:
break # EOF
digest.update(view[:size])
hexdigest: str = digest.hexdigest()
return hexdigest

View file

@ -7,6 +7,7 @@ from __future__ import annotations
import logging
import pathlib
import shutil
from typing import TYPE_CHECKING, cast
from jinja2 import Environment, FileSystemLoader
from ptscripts import Context, command_group
@ -23,10 +24,22 @@ cgroup = command_group(
)
class NoDuplicatesList(list):
class NeedsTracker:
def __init__(self):
self._needs = []
def append(self, need):
if need not in self:
super().append(need)
if need not in self._needs:
self._needs.append(need)
def iter(self, consume=False):
if consume is False:
for need in self._needs:
yield need
return
while self._needs:
need = self._needs.pop(0)
yield need
@cgroup.command(
@ -40,6 +53,9 @@ def generate_workflows(ctx: Context):
"CI": {
"template": "ci.yml",
},
"Nightly": {
"template": "nightly.yml",
},
"Scheduled": {
"template": "scheduled.yml",
},
@ -55,7 +71,10 @@ def generate_workflows(ctx: Context):
loader=FileSystemLoader(str(TEMPLATES)),
)
for workflow_name, details in workflows.items():
template = details["template"]
if TYPE_CHECKING:
assert isinstance(details, dict)
template: str = cast(str, details["template"])
includes: dict[str, bool] = cast(dict, details.get("includes") or {})
workflow_path = WORKFLOWS / template
template_path = TEMPLATES / f"{template}.j2"
ctx.info(
@ -65,7 +84,9 @@ def generate_workflows(ctx: Context):
context = {
"template": template_path.relative_to(REPO_ROOT),
"workflow_name": workflow_name,
"conclusion_needs": NoDuplicatesList(),
"includes": includes,
"conclusion_needs": NeedsTracker(),
"test_salt_needs": NeedsTracker(),
}
loaded_template = env.get_template(f"{template}.j2")
rendered_template = loaded_template.render(**context)