Merge branch 'master' into patch-9
Commit 103521a843
81 changed files with 2940 additions and 1114 deletions
@@ -32,6 +32,7 @@ ignore_errors = True

[paths]
salt =
    salt/
    artifacts/salt
    **/testing/salt/
    **\testing\salt
tests =
69  .github/actions/build-onedir-deps/action.yml  vendored  Normal file

@@ -0,0 +1,69 @@
---
name: build-onedir-deps
description: Build Onedir Dependencies
inputs:
  platform:
    required: true
    type: string
    description: The platform to build
  arch:
    required: true
    type: string
    description: The platform arch to build
  package-name:
    required: false
    type: string
    description: The onedir package name to create
    default: salt
  cache-seed:
    required: true
    type: string
    description: Seed used to invalidate caches

runs:
  using: composite

  steps:

    - name: Cache Deps Onedir Package Directory
      id: onedir-pkg-cache
      uses: actions/cache@v3
      with:
        path: artifacts/${{ inputs.package-name }}
        key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt') }}

    - name: Create Onedir Directory
      shell: bash
      if: steps.onedir-pkg-cache.outputs.cache-hit != 'true'
      run: |
        python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
        python3 -m relenv create --arch=${{ inputs.arch }} artifacts/${{ inputs.package-name }}

    - name: Upgrade Setuptools & Pip
      shell: bash
      if: steps.onedir-pkg-cache.outputs.cache-hit != 'true'
      run: |
        if [ "${{ inputs.platform }}" != "windows" ]; then
          artifacts/${{ inputs.package-name }}/bin/python3 -m pip install -U "pip>=22.3.1,<23.0"
          artifacts/${{ inputs.package-name }}/bin/python3 -m pip install -U "setuptools>=65.6.3,<66"
        else
          artifacts/${{ inputs.package-name }}/Scripts/python -m pip install -U "pip>=22.3.1,<23.0"
          artifacts/${{ inputs.package-name }}/Scripts/python -m pip install -U "setuptools>=65.6.3,<66"
        fi

    - name: Install Salt Onedir Package Dependencies
      env:
        PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
        PIP_EXTRA_INDEX_URL: https://pypi.org/simple
      shell: bash
      run: |
        if [ "${{ inputs.platform }}" != "windows" ]; then
          artifacts/${{ inputs.package-name }}/bin/python3 -m pip install -r requirements/static/pkg/py3.10/${{ inputs.platform }}.txt
        else
          artifacts/${{ inputs.package-name }}/Scripts/python -m pip install -r requirements/static/pkg/py3.10/${{ inputs.platform }}.txt
        fi

    - name: Cleanup Salt Onedir Directory
      shell: bash
      run: |
        tools pkg pre-archive-cleanup artifacts/${{ inputs.package-name }}
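For orientation, a minimal sketch of how a job might call this composite action (not part of the diff; it assumes the repository is already checked out and that setup-relenv has run first, since the cache key above reads env.RELENV_VERSION). The same pattern appears in build-deps-onedir.yml later in this commit:

    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/setup-relenv
        with:
          platform: linux
          arch: x86_64
          cache-seed: SEED-4
      - uses: ./.github/actions/build-onedir-deps
        with:
          platform: linux
          arch: x86_64
          cache-seed: SEED-4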
108  .github/actions/build-onedir-salt/action.yml  vendored  Normal file

@@ -0,0 +1,108 @@
---
name: build-onedir-salt
description: Build Onedir Package
inputs:
  platform:
    required: true
    type: string
    description: The platform to build
  arch:
    required: true
    type: string
    description: The platform arch to build
  package-name:
    required: false
    type: string
    description: The onedir package name to create
    default: salt
  cache-seed:
    required: true
    type: string
    description: Seed used to invalidate caches

runs:
  using: composite

  steps:

    - name: Download Cached Deps Onedir Package Directory
      id: onedir-bare-cache
      uses: actions/cache@v3
      with:
        path: artifacts/${{ inputs.package-name }}
        key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt') }}

    - name: Download Source Tarball
      uses: actions/download-artifact@v3
      with:
        name: salt-${{ env.SALT_VERSION }}.tar.gz

    - name: Install Salt Into Onedir
      if: ${{ inputs.platform != 'windows' }}
      env:
        PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
        PIP_EXTRA_INDEX_URL: https://pypi.org/simple
        USE_STATIC_REQUIREMENTS: "1"
        RELENV_PIP_DIR: "1"
      shell: bash
      run: |
        artifacts/${{ inputs.package-name }}/bin/python3 -m pip install salt-${{ env.SALT_VERSION }}.tar.gz
        if [ ${{ inputs.platform }} == "darwin" ]; then
          pkg/macos/prep_salt.sh --build-dir ./artifacts/${{ inputs.package-name }}
          rm -rf ./artifacts/${{ inputs.package-name }}/opt
          rm -rf ./artifacts/${{ inputs.package-name }}/etc
          rm -rf ./artifacts/${{ inputs.package-name }}/Library
        fi

    - name: Install Salt Into Onedir (Windows)
      if: ${{ inputs.platform == 'windows' }}
      env:
        PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
        PIP_EXTRA_INDEX_URL: https://pypi.org/simple
        USE_STATIC_REQUIREMENTS: "1"
      shell: powershell
      run: |
        # install salt
        pkg\windows\install_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD -SourceTarball salt-${{ env.SALT_VERSION }}.tar.gz
        # prep salt
        pkg\windows\prep_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD

    - name: Cleanup Salt Onedir Directory
      shell: bash
      run: |
        tools pkg pre-archive-cleanup artifacts/${{ inputs.package-name }}

    - name: Create Archive
      shell: bash
      run: |
        cd artifacts/
        tar -cJf ${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz ${{ inputs.package-name }}

    - name: Create Archive (Zipfile)
      if: ${{ inputs.platform == 'windows' }}
      shell: powershell
      run: |
        cd artifacts
        py -3 -m zipfile -c "${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip" ${{ inputs.package-name }}

    - name: Create Hash Files
      shell: bash
      run: |
        tools pkg generate-hashes artifacts/${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.*

    - name: Upload Onedir Tarball as an Artifact
      uses: actions/upload-artifact@v3
      with:
        name: ${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
        path: artifacts/${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz*
        retention-days: 7
        if-no-files-found: error

    - name: Upload Onedir Zipfile as an Artifact
      if: ${{ inputs.platform == 'windows' }}
      uses: actions/upload-artifact@v3
      with:
        name: ${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip
        path: artifacts/${{ inputs.package-name }}-${{ env.SALT_VERSION }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.zip*
        retention-days: 7
        if-no-files-found: error
37  .github/actions/build-source-tarball/action.yml  vendored  Normal file

@@ -0,0 +1,37 @@
---
name: build-source-tarball
description: Build Source Tarball
inputs:
  nox-version:
    required: false
    type: string
    description: The version of Nox to install
    default: "2022.8.7"

runs:
  using: composite

  steps:

    - name: Install Nox
      shell: bash
      run: |
        nox --version || python3 -m pip install nox==${{ inputs.nox-version }}

    - name: Create Source Tarball
      shell: bash
      run: |
        nox -e build

    - name: Create Hash Files
      shell: bash
      run: |
        tools pkg generate-hashes dist/salt-${{ env.SALT_VERSION }}.tar.gz

    - name: Upload Source Tarball as an Artifact
      uses: actions/upload-artifact@v3
      with:
        name: salt-${{ env.SALT_VERSION }}.tar.gz
        path: dist/salt-*.tar.gz*
        retention-days: 7
        if-no-files-found: error
48  .github/actions/setup-relenv/action.yml  vendored  Normal file

@@ -0,0 +1,48 @@
---
name: setup-relenv
description: Setup Relenv
inputs:
  platform:
    required: true
    type: string
    description: The platform to build
  arch:
    required: true
    type: string
    description: The platform arch to build
  cache-seed:
    required: true
    type: string
    description: Seed used to invalidate caches

runs:
  using: composite

  steps:

    - name: Install Relenv
      shell: bash
      run: |
        python3 -m pip install relenv

    - name: Set environment variables
      shell: bash
      run: |
        echo RELENV_VERSION="$(python3 -m pip show relenv | grep Version | cut -d ' ' -f2)" >> $GITHUB_ENV

    - name: Cache Relenv Data Directory
      uses: actions/cache@v3
      with:
        path: ${{ github.workspace }}/.relenv
        key: ${{ inputs.cache-seed }}|relenv|${{ env.RELENV_VERSION }}|${{ inputs.platform }}|${{ inputs.arch }}

    - name: Fetch Toolchain
      if: ${{ inputs.platform == 'linux' }}
      shell: bash
      run: |
        python3 -m relenv toolchain fetch --arch=${{ inputs.arch }}

    - name: Fetch Native Python Build
      shell: bash
      run: |
        python3 -m relenv fetch --arch=${{ inputs.arch }}
.github/actions/setup-salt-version/action.yml  vendored

@@ -2,6 +2,9 @@
name: setup-salt-version
description: Setup Salt Version
inputs:
  cwd:
    type: string
    default: ""
  salt-version:
    type: string
    default: ""

@@ -23,4 +26,7 @@ runs:
      id: setup-salt-version
      shell: bash
      run: |
        if [ "${{ inputs.cwd }}" != "" ]; then
          cd "${{ inputs.cwd }}"
        fi
        tools pkg set-salt-version ${{ inputs.salt-version }}
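For reference, a minimal sketch (not part of the diff) of how a caller might use the new cwd input to run the version setup inside a different checkout directory; the build-deb-pkg path shown is hypothetical here, borrowed from the DEB workflow's checkout path:

    - name: Setup Salt Version
      uses: ./.github/actions/setup-salt-version
      with:
        cwd: build-deb-pkg
        salt-version: "${{ inputs.salt-version }}"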
75  .github/workflows/build-deb-packages.yml  vendored  Normal file

@@ -0,0 +1,75 @@
name: Build Salt Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.

jobs:
  build:
    name: DEB
    runs-on:
      - self-hosted
      - linux
      - ${{ matrix.arch }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86_64
          - aarch64

    container:
      image: ghcr.io/saltstack/salt-ci-containers/debian:11

    steps:
      - uses: actions/checkout@v3
        with:
          path: build-deb-pkg

      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
          path: build-deb-pkg/artifacts/

      - name: Download System Dependencies
        run: |
          apt update
          apt install -y python3 python3-venv build-essential devscripts debhelper bash-completion

      - name: Build Deb
        env:
          SALT_ONEDIR_ARCHIVE: "${{ github.workspace }}/build-deb-pkg/artifacts/salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz"
        run: |
          cd build-deb-pkg
          echo "${{ inputs.salt-version }}" > salt/_version.txt
          ln -sf pkg/debian/ .
          sed -i 's/SALT_RELEASE_VERSION/${{ inputs.salt-version }}/g' debian/changelog
          debuild -e SALT_ONEDIR_ARCHIVE -uc -us

      - name: Upload DEBs
        uses: actions/upload-artifact@v3
        with:
          name: salt-${{ matrix.arch }}-debs
          path: ${{ github.workspace }}/*.deb
          if-no-files-found: error
          retention-days: 7

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-linux-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error
167  .github/workflows/build-deps-onedir.yml  vendored  Normal file

@@ -0,0 +1,167 @@
name: Build Salt Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.
      github-hosted-runners:
        type: boolean
        required: true
      self-hosted-runners:
        type: boolean
        required: true
      cache-seed:
        required: true
        type: string
        description: Seed used to invalidate caches

jobs:

  build-deps-linux:
    name: Linux
    if: ${{ inputs.self-hosted-runners }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86_64
          - aarch64
    runs-on:
      - self-hosted
      - linux
      - ${{ matrix.arch }}
    steps:
      - uses: actions/checkout@v3
      - name: Setup Relenv
        uses: ./.github/actions/setup-relenv
        with:
          platform: linux
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Install Salt Packaging Dependencies into Relenv Onedir
        uses: ./.github/actions/build-onedir-deps
        with:
          platform: linux
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-linux-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error

  build-deps-windows:
    name: Windows
    if: ${{ inputs.github-hosted-runners }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86
          - amd64
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Relenv
        uses: ./.github/actions/setup-relenv
        with:
          platform: windows
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Install Salt Packaging Dependencies into Relenv Onedir
        uses: ./.github/actions/build-onedir-deps
        with:
          platform: windows
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-windows-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error

  build-deps-macos:
    name: macOS
    if: ${{ inputs.github-hosted-runners }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86_64
    runs-on: macos-12
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Relenv
        uses: ./.github/actions/setup-relenv
        with:
          platform: darwin
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Install Salt Packaging Dependencies into Relenv Onedir
        uses: ./.github/actions/build-onedir-deps
        with:
          platform: darwin
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-macos-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error
78  .github/workflows/build-macos-packages.yml  vendored  Normal file

@@ -0,0 +1,78 @@
---
name: Build macOS Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.

jobs:

  build-pkgs:
    name: macOS
    strategy:
      fail-fast: false
      matrix:
        arch:
          - x86_64
    runs-on:
      - macos-12
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.9

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ inputs.salt-version }}"

      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: salt-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz

      - name: Decompress Onedir Tarball
        run: |
          mkdir -p artifacts
          cd artifacts
          tar xvf ${{ github.workspace }}/salt-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz
          cd ${{ github.workspace }}
          mkdir -p pkg/macos/build/opt
          cp -Rp artifacts/salt pkg/macos/build/opt/

      - name: Build Package
        run: |
          cd pkg/macos
          ./prep_salt.sh
          sudo ./package.sh -n ${{ inputs.salt-version }}

      - name: Upload ${{ matrix.arch }} Package
        uses: actions/upload-artifact@v3
        with:
          name: macOS ${{ matrix.arch }} Package
          path: pkg/macos/*unsigned.pkg
          retention-days: 7
          if-no-files-found: error

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error
44  .github/workflows/build-packages.yml  vendored  Normal file

@@ -0,0 +1,44 @@
name: Build Salt Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.
      github-hosted-runners:
        type: boolean
        required: true
      self-hosted-runners:
        type: boolean
        required: true

jobs:
  build-rpm-pkgs:
    name: Build RPM Packages
    if: ${{ inputs.self-hosted-runners }}
    uses: ./.github/workflows/build-rpm-packages.yml
    with:
      salt-version: "${{ inputs.salt-version }}"

  build-deb-pkgs:
    name: Build DEB Packages
    if: ${{ inputs.self-hosted-runners }}
    uses: ./.github/workflows/build-deb-packages.yml
    with:
      salt-version: "${{ inputs.salt-version }}"

  build-windows-pkgs:
    name: Build Windows Packages
    if: ${{ inputs.github-hosted-runners }}
    uses: ./.github/workflows/build-windows-packages.yml
    with:
      salt-version: "${{ inputs.salt-version }}"

  build-macos-pkgs:
    name: Build macOS Packages
    if: ${{ inputs.github-hosted-runners }}
    uses: ./.github/workflows/build-macos-packages.yml
    with:
      salt-version: "${{ inputs.salt-version }}"
71  .github/workflows/build-rpm-packages.yml  vendored  Normal file

@@ -0,0 +1,71 @@
name: Build Salt Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.

jobs:
  build:
    name: RPM
    runs-on:
      - self-hosted
      - linux
      - ${{ matrix.arch }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86_64
          - aarch64

    container:
      image: ghcr.io/saltstack/salt-ci-containers/centos:stream9

    steps:
      - uses: actions/checkout@v3

      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
          path: artifacts/

      - name: Download System Dependencies
        run: |
          yum -y update
          yum -y install python3 python3-pip openssl git rpmdevtools rpmlint systemd-units libxcrypt-compat

      - name: Build RPM
        env:
          SALT_ONEDIR_ARCHIVE: "${{ github.workspace }}/artifacts/salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz"
        run: |
          echo "${{ inputs.salt-version }}" > salt/_version.txt
          sed -i 's/^Version: \(.*\)$/Version: ${{ inputs.salt-version }}/g' pkg/rpm/salt.spec
          rpmbuild -bb --define="_salt_src $(pwd)" $(pwd)/pkg/rpm/salt.spec

      - name: Upload RPMs
        uses: actions/upload-artifact@v3
        with:
          name: salt-${{ matrix.arch }}-rpms
          path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm
          retention-days: 7
          if-no-files-found: error

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-linux-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error
183  .github/workflows/build-salt-onedir.yml  vendored  Normal file

@@ -0,0 +1,183 @@
name: Build Salt Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.
      github-hosted-runners:
        type: boolean
        required: true
      self-hosted-runners:
        type: boolean
        required: true
      cache-seed:
        required: true
        type: string
        description: Seed used to invalidate caches

jobs:

  build-salt-linux:
    name: Linux
    if: ${{ inputs.self-hosted-runners }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86_64
          - aarch64
    runs-on:
      - self-hosted
      - linux
      - ${{ matrix.arch }}
    steps:
      - uses: actions/checkout@v3

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ inputs.salt-version }}"

      - name: Setup Relenv
        uses: ./.github/actions/setup-relenv
        with:
          platform: linux
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Install Salt into Relenv Onedir
        uses: ./.github/actions/build-onedir-salt
        with:
          platform: linux
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-linux-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error

  build-salt-windows:
    name: Windows
    if: ${{ inputs.github-hosted-runners }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86
          - amd64
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Relenv
        uses: ./.github/actions/setup-relenv
        with:
          platform: windows
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ inputs.salt-version }}"

      - name: Install Salt into Relenv Onedir
        uses: ./.github/actions/build-onedir-salt
        with:
          platform: windows
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-windows-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error

  build-salt-macos:
    name: macOS
    if: ${{ inputs.github-hosted-runners }}
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86_64
    runs-on: macos-12
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Setup Relenv
        uses: ./.github/actions/setup-relenv
        with:
          platform: darwin
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ inputs.salt-version }}"

      - name: Install Salt into Relenv Onedir
        uses: ./.github/actions/build-onedir-salt
        with:
          platform: darwin
          arch: ${{ matrix.arch }}
          cache-seed: ${{ inputs.cache-seed }}

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-macos-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error
83  .github/workflows/build-windows-packages.yml  vendored  Normal file

@@ -0,0 +1,83 @@
---
name: Build Windows Packages

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to building packages.

jobs:

  build-pkgs:
    name: Windows
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        arch:
          - x86
          - amd64
    runs-on:
      - windows-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.9

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ inputs.salt-version }}"

      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: salt-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.zip

      - name: Decompress Onedir Zipfile
        run: |
          py -3 -m zipfile -e salt-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.zip pkg/windows/
          mv pkg/windows/salt pkg/windows/buildenv

      - name: Build Package
        shell: powershell
        run: |
          & pkg/windows/build.cmd -Architecture ${{ matrix.arch }} -Version ${{ inputs.salt-version }} -CICD -SkipInstall

      - name: Upload ${{ matrix.arch }} NSIS Package
        uses: actions/upload-artifact@v3
        with:
          name: Windows ${{ matrix.arch }} NSIS Package
          path: pkg/windows/build/Salt-*.exe
          retention-days: 7
          if-no-files-found: error

      - name: Upload ${{ matrix.arch }} MSI Package
        uses: actions/upload-artifact@v3
        with:
          name: Windows ${{ matrix.arch }} MSI Package
          path: pkg/windows/build/Salt-*.msi
          retention-days: 7
          if-no-files-found: error

      - name: Set Exit Status
        if: always()
        run: |
          py -3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ matrix.arch }}

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error
207  .github/workflows/ci.yml  vendored

@@ -26,7 +26,8 @@ on:
env:
  COLUMNS: 160
  CACHE_SEED: SEED-0  # Bump the number to invalidate all caches
  CACHE_SEED: SEED-4  # Bump the number to invalidate all caches
  RELENV_DATA: "${{ github.workspace }}/.relenv"

permissions:
  contents: read  # for dorny/paths-filter to fetch a list of changed files

@@ -195,7 +196,7 @@ jobs:
        if: always()
        run: |
          mkdir exitstatus
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-prepare-ci
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-prepare-ci

      - name: Upload Exit Status
        if: always()

@@ -232,24 +233,97 @@ jobs:
    with:
      changed-files: ${{ needs.prepare-ci.outputs.changed-files }}

  twine-check:
    name: Twine Check
  build-source-tarball:
    name: Build Source Tarball
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/twine-check-action.yml
    needs:
      - prepare-ci
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"

      - name: Build Source Tarball
        uses: ./.github/actions/build-source-tarball

      - name: Set Exit Status
        if: always()
        run: |
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-source-tarball

      - name: Upload Exit Status
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: exitstatus
          path: exitstatus
          if-no-files-found: error

  build-deps-onedir:
    name: Build Dependencies Onedir
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
    uses: ./.github/workflows/build-deps-onedir.yml
    with:
      changed-files: ${{ needs.prepare-ci.outputs.changed-files }}
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }}


  build-salt-onedir:
    name: Build Salt Onedir
    needs:
      - prepare-ci
      - build-deps-onedir
      - build-source-tarball
    uses: ./.github/workflows/build-salt-onedir.yml
    with:
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }}


  build-pkgs:
    name: Build Salt Packages
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/build-packages.yml
    with:
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }}


  windows-2016:
    name: Windows 2016
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2016
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -259,10 +333,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2019
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -272,23 +349,29 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2022
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

  macos-12:
    name: MacOS 12
    name: macOS 12
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action-macos.yml
    with:
      distro-slug: macos-12
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: darwin
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -298,10 +381,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: almalinux-8
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -311,10 +397,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: almalinux-9
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -324,10 +413,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: amazonlinux-2
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -337,10 +429,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: archlinux-lts
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -350,10 +445,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centos-7
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -363,10 +461,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centosstream-8
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -376,10 +477,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centosstream-9
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -389,10 +493,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-10
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -402,23 +509,29 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-11
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

  debian-11-arm64:
    name: Debian 11 ARM64
    name: Debian 11 Arm64
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-11-arm64
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -428,10 +541,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: fedora-36
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -441,10 +557,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: opensuse-15
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -454,10 +573,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: photonos-3
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -467,10 +589,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: photonos-4
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -480,10 +605,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-18.04
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -493,10 +621,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-20.04
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -506,10 +637,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-20.04-arm64
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -519,10 +653,13 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-22.04
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}

@@ -532,15 +669,17 @@ jobs:
    if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-ci
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-22.04-arm64
      nox-session: ci-test
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-ci.outputs.testrun }}
      salt-version: "${{ needs.prepare-ci.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-ci.outputs.cache-seed }}


  set-pipeline-exit-status:
    # This step is just so we can make github require this step, to pass checks
    # on a pull request instead of requiring all

@@ -551,7 +690,9 @@ jobs:
      - pre-commit
      - docs
      - lint
      - twine-check
      - build-deps-onedir
      - build-salt-onedir
      - build-pkgs
      - almalinux-8
      - almalinux-9
      - amazonlinux-2
4  .github/workflows/docs-action.yml  vendored

@@ -58,7 +58,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-docs-html

      - name: Upload Exit Status

@@ -120,7 +120,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-docs-man

      - name: Upload Exit Status
4  .github/workflows/lint-action.yml  vendored

@@ -57,7 +57,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-lint-salt

      - name: Upload Exit Status

@@ -116,7 +116,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-lint-salt

      - name: Upload Exit Status
2  .github/workflows/pre-commit-action.yml  vendored

@@ -61,7 +61,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-pre-commit

      - name: Upload Exit Status
88  .github/workflows/test-action-macos.yml  vendored

@@ -15,10 +15,6 @@ on:
      required: true
      type: string
      description: JSON string containing information about what and how to run the test suite
    cache-seed:
      required: true
      type: string
      description: Seed used to invalidate caches
    python-version:
      required: false
      type: string

@@ -28,6 +24,23 @@ on:
      type: string
      required: true
      description: The Salt version to set prior to running tests.
    cache-seed:
      required: true
      type: string
      description: Seed used to invalidate caches
    platform:
      required: true
      type: string
      description: The platform being tested
    arch:
      required: true
      type: string
      description: The platform arch being tested
    package-name:
      required: false
      type: string
      description: The onedir package name to use
      default: salt


env:

@@ -86,6 +99,21 @@ jobs:
          path: nox.${{ inputs.distro-slug }}.tar.*
          key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }}

      - name: Download Onedir Tarball as an Artifact
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        uses: actions/download-artifact@v3
        with:
          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
          path: artifacts/

      - name: Decompress Onedir Tarball
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        shell: bash
        run: |
          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
          cd artifacts
          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz

      # Skip jobs if nox.*.tar.* is already cached
      - name: Set up Python ${{ inputs.python-version }}
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'

@@ -110,9 +138,9 @@ jobs:
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          if [ "${{ matrix.transport }}" != "tcp" ]; then
            echo NOX_SESSION=${{ inputs.nox-session}}-3 >> $GITHUB_ENV
            echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV"
          else
            echo NOX_SESSION=${{ inputs.nox-session}}-tcp-3 >> $GITHUB_ENV
            echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV"
          fi

      - name: Install Dependencies

@@ -142,7 +170,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps

      - name: Upload Exit Status

@@ -156,7 +184,7 @@ jobs:
  test:
    name: Test
    runs-on: ${{ inputs.distro-slug }}
    timeout-minutes: 600  # 10 Hours
    timeout-minutes: 360  # 6 Hours
    needs:
      - generate-matrix
      - dependencies

@@ -173,8 +201,20 @@ jobs:
        run: |
          echo "${{ inputs.salt-version }}" > salt/_version.txt

      - name: Download Onedir Tarball as an Artifact
        uses: actions/download-artifact@v3
        with:
          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
          path: artifacts/

      - name: Decompress Onedir Tarball
        shell: bash
        run: |
          python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
          cd artifacts
          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz

      - name: Install System Dependencies
        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
        run: |
          brew install tree

@@ -200,9 +240,9 @@ jobs:
      - name: Define Nox Session
        run: |
          if [ "${{ matrix.transport }}" != "tcp" ]; then
            echo NOX_SESSION=${{ inputs.nox-session}}-3 >> $GITHUB_ENV
            echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV"
          else
            echo NOX_SESSION=${{ inputs.nox-session}}-tcp-3 >> $GITHUB_ENV
            echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV"
          fi

      - name: Download testrun-changed-files.txt

@@ -269,17 +309,20 @@ jobs:
      - name: Fix file ownership
        run: |
          sudo chown -R $(id -un) .
          sudo chown -R "$(id -un)" .

      - name: Combine Coverage Reports
        if: always() && contains(fromJSON('["canceled", "skipped"]'), steps.run-tests.outcome) == false
        if: always()
        run: |
          nox -e combine-coverage

      - name: Prepare Test Run Artifacts
        id: download-artifacts-from-vm
        if: always() && contains(fromJSON('["canceled", "skipped"]'), steps.run-tests.outcome) == false
        if: always()
        run: |
          # Delete the salt onedir, we won't need it anymore and it will prevent
          # from it showing in the tree command below
          rm -rf artifacts/salt*
          tree -a artifacts
          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}
          echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV

@@ -289,7 +332,10 @@ jobs:
        uses: actions/upload-artifact@v3
        with:
          name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
          path: artifacts
          path: |
            artifacts
            !artifacts/salt/*
            !artifacts/salt-*.tar.*

      # - name: Publish Test Report
      #   uses: mikepenz/action-junit-report@v3

@@ -320,7 +366,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-${{ matrix.tests-chunk }}-tests

      - name: Upload Exit Status

@@ -350,9 +396,9 @@ jobs:
      - name: Define Nox Session
        run: |
          if [ "${{ matrix.transport }}" != "tcp" ]; then
            echo NOX_SESSION=${{ inputs.nox-session}}-3 >> $GITHUB_ENV
            echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV"
          else
            echo NOX_SESSION=${{ inputs.nox-session}}-tcp-3 >> $GITHUB_ENV
            echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV"
          fi

      - name: Download Test Run Artifacts

@@ -374,10 +420,10 @@ jobs:
          name: code-coverage
          path: artifacts/coverage

      - name: Set up Python 3.10
      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          python-version: "3.9"

      - name: Install Nox
        run: |

@@ -410,7 +456,7 @@ jobs:
      - name: Set Exit Status
        if: always()
        run: |
          mkdir exitstatus
          python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
          echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report

      - name: Upload Exit Status
88
.github/workflows/test-action.yml
vendored
|
@ -23,6 +23,19 @@ on:
|
|||
required: true
|
||||
type: string
|
||||
description: Seed used to invalidate caches
|
||||
platform:
|
||||
required: true
|
||||
type: string
|
||||
description: The platform being tested
|
||||
arch:
|
||||
required: true
|
||||
type: string
|
||||
description: The platform arch being tested
|
||||
package-name:
|
||||
required: false
|
||||
type: string
|
||||
description: The onedir package name to use
|
||||
default: salt
|
||||
|
||||
|
||||
env:
|
||||
|
@ -88,20 +101,37 @@ jobs:
|
|||
key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
|
||||
|
||||
# Skip jobs if nox.*.tar.* is already cached
|
||||
- name: Download Onedir Tarball as an Artifact
|
||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
||||
path: artifacts/
|
||||
|
||||
- name: Decompress Onedir Tarball
|
||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
||||
cd artifacts
|
||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
||||
|
||||
- name: PyPi Proxy
|
||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
|
||||
|
||||
- name: Setup Python Tools Scripts
|
||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||
uses: ./.github/actions/setup-python-tools-scripts
|
||||
|
||||
- name: Define Nox Session
|
||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
if [ "${{ matrix.transport }}" != "tcp" ]; then
|
||||
echo NOX_SESSION=${{ inputs.nox-session}}-3 >> $GITHUB_ENV
|
||||
echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV"
|
||||
else
|
||||
echo NOX_SESSION=${{ inputs.nox-session}}-tcp-3 >> $GITHUB_ENV
|
||||
echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Start VM
|
||||
|
@ -111,6 +141,7 @@ jobs:
|
|||
tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }}
|
||||
|
||||
- name: List Free Space
|
||||
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
|
||||
|
||||
|
@ -147,7 +178,7 @@ jobs:
|
|||
- name: Set Exit Status
|
||||
if: always()
|
||||
run: |
|
||||
mkdir exitstatus
|
||||
python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
|
||||
echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps
|
||||
|
||||
- name: Upload Exit Status
|
||||
|
@ -164,7 +195,7 @@ jobs:
|
|||
- self-hosted
|
||||
- linux
|
||||
- bastion
|
||||
timeout-minutes: 240 # 4 Hours - More than this and something is wrong
|
||||
timeout-minutes: 300 # 5 Hours - More than this and something is wrong
|
||||
needs:
|
||||
- dependencies
|
||||
- generate-matrix
|
||||
|
@ -181,6 +212,19 @@ jobs:
|
|||
run: |
|
||||
echo "${{ inputs.salt-version }}" > salt/_version.txt
|
||||
|
||||
- name: Download Onedir Tarball as an Artifact
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
||||
path: artifacts/
|
||||
|
||||
- name: Decompress Onedir Tarball
|
||||
shell: bash
|
||||
run: |
|
||||
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
|
||||
cd artifacts
|
||||
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
|
||||
|
||||
- name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
|
@ -197,9 +241,9 @@ jobs:
|
|||
- name: Define Nox Session
|
||||
run: |
|
||||
if [ "${{ matrix.transport }}" != "tcp" ]; then
|
||||
echo NOX_SESSION=${{ inputs.nox-session}}-3 >> $GITHUB_ENV
|
||||
echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV"
|
||||
else
|
||||
echo NOX_SESSION=${{ inputs.nox-session}}-tcp-3 >> $GITHUB_ENV
|
||||
echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Download testrun-changed-files.txt
|
||||
|
@ -229,7 +273,7 @@ jobs:
|
|||
|
||||
- name: Show System Info & Test Plan
|
||||
run: |
|
||||
tools --timestamps --no-output-timeout-secs=18000 vm testplan --skip-requirements-install \
|
||||
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
|
||||
--nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \
|
||||
${{ matrix.tests-chunk }}
|
||||
|
||||
|
@ -237,7 +281,7 @@ jobs:
|
|||
id: run-slow-changed-tests
|
||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
||||
run: |
|
||||
tools --timestamps --no-output-timeout-secs=18000 vm test --skip-requirements-install \
|
||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
||||
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
|
||||
${{ matrix.tests-chunk }} -- --run-slow --suppress-no-test-exit-code \
|
||||
--from-filenames=testrun-changed-files.txt
|
||||
|
@ -246,7 +290,7 @@ jobs:
|
|||
id: run-fast-tests
|
||||
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
|
||||
run: |
|
||||
tools --timestamps --no-output-timeout-secs=18000 vm test --skip-requirements-install \
|
||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
||||
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
|
||||
${{ matrix.tests-chunk }}
|
||||
|
||||
|
@ -254,20 +298,23 @@ jobs:
|
|||
id: run-full-tests
|
||||
if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
|
||||
run: |
|
||||
tools --timestamps --no-output-timeout-secs=18000 vm test --skip-requirements-install \
|
||||
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
|
||||
--nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
|
||||
${{ matrix.tests-chunk }} -- --run-slow
|
||||
|
||||
- name: Combine Coverage Reports
|
||||
if: always() && steps.spin-up-vm.outcome == 'success' && contains(fromJSON('["canceled", "skipped"]'), steps.run-tests.outcome) == false
|
||||
if: always() && steps.spin-up-vm.outcome == 'success'
|
||||
run: |
|
||||
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
|
||||
|
||||
- name: Download Test Run Artifacts
|
||||
id: download-artifacts-from-vm
|
||||
if: always() && steps.spin-up-vm.outcome == 'success' && contains(fromJSON('["canceled", "skipped"]'), steps.run-tests.outcome) == false
|
||||
if: always() && steps.spin-up-vm.outcome == 'success'
|
||||
run: |
|
||||
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
|
||||
# Delete the salt onedir, we won't need it anymore and it will prevent
|
||||
# from it showing in the tree command below
|
||||
rm -rf artifacts/salt*
|
||||
tree -a artifacts
|
||||
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}
echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> "$GITHUB_ENV"
|
||||
|
@ -282,7 +329,10 @@ jobs:
|
|||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}
|
||||
path: artifacts
|
||||
path: |
|
||||
artifacts
|
||||
!artifacts/salt/*
|
||||
!artifacts/salt-*.tar.*
|
||||
|
||||
# - name: Publish Test Report
|
||||
# uses: mikepenz/action-junit-report@v3
|
||||
|
@ -321,7 +371,7 @@ jobs:
|
|||
- name: Set Exit Status
|
||||
if: always()
|
||||
run: |
|
||||
mkdir exitstatus
|
||||
python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
|
||||
echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-${{ matrix.tests-chunk }}-tests
|
||||
|
||||
- name: Upload Exit Status
|
||||
|
@ -354,9 +404,9 @@ jobs:
|
|||
- name: Define Nox Session
|
||||
run: |
|
||||
if [ "${{ matrix.transport }}" != "tcp" ]; then
|
||||
echo NOX_SESSION=${{ inputs.nox-session}}-3 >> $GITHUB_ENV
|
||||
echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV"
|
||||
else
|
||||
echo NOX_SESSION=${{ inputs.nox-session}}-tcp-3 >> $GITHUB_ENV
|
||||
echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Download Test Run Artifacts
|
||||
|
@ -378,10 +428,10 @@ jobs:
|
|||
name: code-coverage
|
||||
path: artifacts/coverage
|
||||
|
||||
- name: Set up Python 3.10
|
||||
- name: Set up Python 3.9
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: "3.9"
|
||||
|
||||
- name: Install Nox
|
||||
run: |
|
||||
|
@ -414,7 +464,7 @@ jobs:
|
|||
- name: Set Exit Status
|
||||
if: always()
|
||||
run: |
|
||||
mkdir exitstatus
|
||||
python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)"
|
||||
echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report
|
||||
|
||||
- name: Upload Exit Status
|
||||
|
|
39
.github/workflows/twine-check-action.yml
vendored
|
@ -1,39 +0,0 @@
|
|||
name: Twine Check

on:
workflow_call:
inputs:
changed-files:
required: true
type: string
description: JSON string containing information about changed files

jobs:
Twine-Check:
name: Run 'twine check' Against Salt

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: '3.9'

- name: Install dependencies
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
pip install --upgrade pip setuptools wheel
pip install twine>=3.4.1
# pip install build # add build when implement pyproject.toml

- name: Create Source Tarball
run: |
python3 setup.py sdist
# TBD python3 -m build --sdist # replace with build when implement pyproject.toml

- name: Twine check
run: |
python3 -m twine check dist/*
|
|
@ -909,30 +909,6 @@ repos:
|
|||
# <---- Lint CI Requirements ---------------------------------------------------------------------------------------

# ----- Changelog ------------------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-changelog-3.6-requirements
name: Changelog CI Py3.6 Requirements
files: ^requirements/static/ci/(changelog\.in|py3\.6/(changelog|linux)\.txt)$
pass_filenames: false
args:
- -v
- --py-version=3.6
- --platform=linux
- --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt
- requirements/static/ci/changelog.in

- id: pip-tools-compile
alias: compile-ci-changelog-3.7-requirements
name: Changelog CI Py3.7 Requirements
files: ^requirements/static/ci/(changelog\.in|py3\.7/(changelog|linux)\.txt)$
pass_filenames: false
args:
- -v
- --py-version=3.7
- --platform=linux
- --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt
- requirements/static/ci/changelog.in

- id: pip-tools-compile
alias: compile-ci-changelog-3.8-requirements
name: Changelog CI Py3.8 Requirements
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
alma-8-x86_64: ami-09961fccef817a9ef
amazon-2-x86_64: ami-0266b2760c01d89a4
arch-lts-x86_64: ami-04ae887411feaa715
centos-7-x86_64: ami-0a45d4ae356ee2982
centosstream-9-x86_64: ami-0f9d66d2ef19344f0
debian-10-amd64: ami-0ea6eb8b5f860d8a1
debian-11-amd64: ami-0581a6058da20a17d
debian-11-arm64: ami-0c8b7dff12df47ba9
opensuse-15-x86_64: ami-014a93174618d6da7
photon-3-x86_64: ami-02e8f68f324dae0b6
ubuntu-1804-amd64: ami-00830eefa0545100c
ubuntu-2004-amd64: ami-008bd661d292c4848
ubuntu-2004-arm64: ami-04c76ca9f709e81f9
ubuntu-2204-amd64: ami-046cdaa1b73689025
windows-2016-x64: ami-0dfd4f62f34c9fdc5
windows-2019-x64: ami-07b387a9d8e5d8f85
alma-8-x86_64: ami-06a0308b91cb200a9
amazon-2-x86_64: ami-0c90093e3bd780a39
arch-lts-x86_64: ami-06add6fca560d2eb4
centos-7-x86_64: ami-0ebd831e01bac517e
centosstream-9-x86_64: ami-0fb16e7ca51a4a2ce
debian-10-amd64: ami-0921776a91e318079
debian-11-arm64: ami-0cee49983fb18e3c6
debian-11-amd64: ami-0254b7daa13b2e771
opensuse-15-x86_64: ami-054f02d7bdc81a344
photon-3-x86_64: ami-0339c0cccf30b6ffb
ubuntu-1804-amd64: ami-04aff0098ebd2e3f2
ubuntu-2004-arm64: ami-0a910a24b6179c172
ubuntu-2004-amd64: ami-05cd617cfcc4c2e2d
ubuntu-2204-amd64: ami-04098f5c44c12ffc5
windows-2016-x64: ami-0b4c01a38d46cd809
windows-2019-x64: ami-033defff9aa227eb3
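(The flat slug-to-AMI map above and the richer cicd/golden-images.json entries that follow describe the CI base images the VM tooling builds from. A hedged sketch of how such a file could be consulted; the helper is illustrative, not the actual tools implementation:)

import json

def lookup_image(distro_slug, path="cicd/golden-images.json"):
    # Each top-level key is a distro slug; the value records the AMI id,
    # instance type, SSH username, and so on.
    with open(path, encoding="utf-8") as fh:
        return json.load(fh)[distro_slug]

image = lookup_image("ubuntu-22.04")
print(image["ami"], image["instance_type"], image["ssh_username"])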
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
{
|
||||
"almalinux-8": {
|
||||
"ami": "ami-09961fccef817a9ef",
|
||||
"ami": "ami-06a0308b91cb200a9",
|
||||
"ami_description": "CI Image of AlmaLinux 8 x86_64",
|
||||
"ami_name": "salt-project/ci/almalinux/8/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/almalinux/8/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -10,9 +10,9 @@
|
|||
"ssh_username": "ec2-user"
|
||||
},
|
||||
"almalinux-9": {
|
||||
"ami": "ami-05810f528f7878bff",
|
||||
"ami": "ami-040d4b7558be06356",
|
||||
"ami_description": "CI Image of AlmaLinux 9 x86_64",
|
||||
"ami_name": "salt-project/ci/almalinux/9/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/almalinux/9/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -20,9 +20,9 @@
|
|||
"ssh_username": "ec2-user"
|
||||
},
|
||||
"amazonlinux-2": {
|
||||
"ami": "ami-0266b2760c01d89a4",
|
||||
"ami": "ami-0c90093e3bd780a39",
|
||||
"ami_description": "CI Image of AmazonLinux 2 x86_64",
|
||||
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230117.1236",
|
||||
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -30,9 +30,9 @@
|
|||
"ssh_username": "ec2-user"
|
||||
},
|
||||
"archlinux-lts": {
|
||||
"ami": "ami-04ae887411feaa715",
|
||||
"ami": "ami-06add6fca560d2eb4",
|
||||
"ami_description": "CI Image of ArchLinux lts x86_64",
|
||||
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "false",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -40,9 +40,9 @@
|
|||
"ssh_username": "arch"
|
||||
},
|
||||
"centos-7": {
|
||||
"ami": "ami-0a45d4ae356ee2982",
|
||||
"ami": "ami-0ebd831e01bac517e",
|
||||
"ami_description": "CI Image of CentOS 7 x86_64",
|
||||
"ami_name": "salt-project/ci/centos/7/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/centos/7/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -50,9 +50,9 @@
|
|||
"ssh_username": "centos"
|
||||
},
|
||||
"centosstream-8": {
|
||||
"ami": "ami-0a9864783b49581fc",
|
||||
"ami": "ami-0bcdc694f4891301b",
|
||||
"ami_description": "CI Image of CentOSStream 8 x86_64",
|
||||
"ami_name": "salt-project/ci/centosstream/8/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/centosstream/8/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -60,9 +60,9 @@
|
|||
"ssh_username": "cloud-user"
|
||||
},
|
||||
"centosstream-9": {
|
||||
"ami": "ami-0f9d66d2ef19344f0",
|
||||
"ami": "ami-0fb16e7ca51a4a2ce",
|
||||
"ami_description": "CI Image of CentOSStream 9 x86_64",
|
||||
"ami_name": "salt-project/ci/centosstream/9/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/centosstream/9/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -70,9 +70,9 @@
|
|||
"ssh_username": "ec2-user"
|
||||
},
|
||||
"debian-10": {
|
||||
"ami": "ami-0ea6eb8b5f860d8a1",
|
||||
"ami": "ami-0921776a91e318079",
|
||||
"ami_description": "CI Image of Debian 10 x86_64",
|
||||
"ami_name": "salt-project/ci/debian/10/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/debian/10/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -80,9 +80,9 @@
|
|||
"ssh_username": "admin"
|
||||
},
|
||||
"debian-11-arm64": {
|
||||
"ami": "ami-0c8b7dff12df47ba9",
|
||||
"ami": "ami-0cee49983fb18e3c6",
|
||||
"ami_description": "CI Image of Debian 11 arm64",
|
||||
"ami_name": "salt-project/ci/debian/11/arm64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/debian/11/arm64/20230124.1205",
|
||||
"arch": "arm64",
|
||||
"cloudwatch-agent-available": "false",
|
||||
"instance_type": "m6g.large",
|
||||
|
@ -90,9 +90,9 @@
|
|||
"ssh_username": "admin"
|
||||
},
|
||||
"debian-11": {
|
||||
"ami": "ami-0581a6058da20a17d",
|
||||
"ami": "ami-0254b7daa13b2e771",
|
||||
"ami_description": "CI Image of Debian 11 x86_64",
|
||||
"ami_name": "salt-project/ci/debian/11/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/debian/11/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -100,9 +100,9 @@
|
|||
"ssh_username": "admin"
|
||||
},
|
||||
"fedora-36": {
|
||||
"ami": "ami-0d18d7a9c38a45715",
|
||||
"ami": "ami-02b765791005ea7a9",
|
||||
"ami_description": "CI Image of Fedora 36 x86_64",
|
||||
"ami_name": "salt-project/ci/fedora/36/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/fedora/36/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -110,9 +110,9 @@
|
|||
"ssh_username": "fedora"
|
||||
},
|
||||
"opensuse-15": {
|
||||
"ami": "ami-014a93174618d6da7",
|
||||
"ami": "ami-054f02d7bdc81a344",
|
||||
"ami_description": "CI Image of Opensuse 15 x86_64",
|
||||
"ami_name": "salt-project/ci/opensuse/15/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/opensuse/15/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -120,9 +120,9 @@
|
|||
"ssh_username": "ec2-user"
|
||||
},
|
||||
"photonos-3": {
|
||||
"ami": "ami-02e8f68f324dae0b6",
|
||||
"ami": "ami-0339c0cccf30b6ffb",
|
||||
"ami_description": "CI Image of PhotonOS 3 x86_64",
|
||||
"ami_name": "salt-project/ci/photonos/3/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/photonos/3/x86_64/20230124.1204",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -130,9 +130,9 @@
|
|||
"ssh_username": "root"
|
||||
},
|
||||
"photonos-4": {
|
||||
"ami": "ami-0ebdae3c2bc3a6c6c",
|
||||
"ami": "ami-0ce4f660efb81cbb6",
|
||||
"ami_description": "CI Image of PhotonOS 4 x86_64",
|
||||
"ami_name": "salt-project/ci/photonos/4/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/photonos/4/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -140,9 +140,9 @@
|
|||
"ssh_username": "root"
|
||||
},
|
||||
"ubuntu-18.04": {
|
||||
"ami": "ami-00830eefa0545100c",
|
||||
"ami": "ami-04aff0098ebd2e3f2",
|
||||
"ami_description": "CI Image of Ubuntu 18.04 x86_64",
|
||||
"ami_name": "salt-project/ci/ubuntu/18.04/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/ubuntu/18.04/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -150,9 +150,9 @@
|
|||
"ssh_username": "ubuntu"
|
||||
},
|
||||
"ubuntu-20.04-arm64": {
|
||||
"ami": "ami-04c76ca9f709e81f9",
|
||||
"ami": "ami-0a910a24b6179c172",
|
||||
"ami_description": "CI Image of Ubuntu 20.04 arm64",
|
||||
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230124.1205",
|
||||
"arch": "arm64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "m6g.large",
|
||||
|
@ -160,9 +160,9 @@
|
|||
"ssh_username": "ubuntu"
|
||||
},
|
||||
"ubuntu-20.04": {
|
||||
"ami": "ami-008bd661d292c4848",
|
||||
"ami": "ami-05cd617cfcc4c2e2d",
|
||||
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
|
||||
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -170,9 +170,9 @@
|
|||
"ssh_username": "ubuntu"
|
||||
},
|
||||
"ubuntu-22.04-arm64": {
|
||||
"ami": "ami-0ec137d3046df90f7",
|
||||
"ami": "ami-0414bc0dc4e151967",
|
||||
"ami_description": "CI Image of Ubuntu 22.04 arm64",
|
||||
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230124.1205",
|
||||
"arch": "arm64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "m6g.large",
|
||||
|
@ -180,9 +180,9 @@
|
|||
"ssh_username": "ubuntu"
|
||||
},
|
||||
"ubuntu-22.04": {
|
||||
"ami": "ami-046cdaa1b73689025",
|
||||
"ami": "ami-04098f5c44c12ffc5",
|
||||
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
|
||||
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.large",
|
||||
|
@ -190,9 +190,9 @@
|
|||
"ssh_username": "ubuntu"
|
||||
},
|
||||
"windows-2016": {
|
||||
"ami": "ami-0dfd4f62f34c9fdc5",
|
||||
"ami": "ami-0b4c01a38d46cd809",
|
||||
"ami_description": "CI Image of Windows 2016 x86_64",
|
||||
"ami_name": "salt-project/ci/windows/2016/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/windows/2016/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.xlarge",
|
||||
|
@ -200,9 +200,9 @@
|
|||
"ssh_username": "Administrator"
|
||||
},
|
||||
"windows-2019": {
|
||||
"ami": "ami-07b387a9d8e5d8f85",
|
||||
"ami": "ami-033defff9aa227eb3",
|
||||
"ami_description": "CI Image of Windows 2019 x86_64",
|
||||
"ami_name": "salt-project/ci/windows/2019/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/windows/2019/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.xlarge",
|
||||
|
@ -210,9 +210,9 @@
|
|||
"ssh_username": "Administrator"
|
||||
},
|
||||
"windows-2022": {
|
||||
"ami": "ami-0ed6abf248f8a412c",
|
||||
"ami": "ami-01b8005f68b79a901",
|
||||
"ami_description": "CI Image of Windows 2022 x86_64",
|
||||
"ami_name": "salt-project/ci/windows/2022/x86_64/20230117.1237",
|
||||
"ami_name": "salt-project/ci/windows/2022/x86_64/20230124.1205",
|
||||
"arch": "x86_64",
|
||||
"cloudwatch-agent-available": "true",
|
||||
"instance_type": "t3a.xlarge",
|
||||
|
|
208
noxfile.py
|
@ -7,11 +7,14 @@ Nox configuration script
|
|||
# pylint: disable=resource-leakage,3rd-party-module-not-gated
|
||||
|
||||
import datetime
|
||||
import gzip
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import sqlite3
|
||||
import sys
|
||||
import tarfile
|
||||
import tempfile
|
||||
|
||||
# fmt: off
|
||||
|
@ -69,14 +72,18 @@ if COVERAGE_FILE is None:
|
|||
IS_DARWIN = sys.platform.lower().startswith("darwin")
|
||||
IS_WINDOWS = sys.platform.lower().startswith("win")
|
||||
IS_FREEBSD = sys.platform.lower().startswith("freebsd")
|
||||
IS_LINUX = sys.platform.lower().startswith("linux")
|
||||
ONEDIR_ARTIFACT_PATH = ARTIFACTS_DIR / "salt"
|
||||
if IS_WINDOWS:
|
||||
ONEDIR_PYTHON_PATH = ONEDIR_ARTIFACT_PATH / "Scripts" / "python.exe"
|
||||
else:
|
||||
ONEDIR_PYTHON_PATH = ONEDIR_ARTIFACT_PATH / "bin" / "python3"
|
||||
# Python versions to run against
|
||||
_PYTHON_VERSIONS = ("3", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10")
|
||||
|
||||
# Nox options
|
||||
# Reuse existing virtualenvs
|
||||
nox.options.reuse_existing_virtualenvs = True
|
||||
# Don't fail on missing interpreters
|
||||
nox.options.error_on_missing_interpreters = False
|
||||
|
||||
# Change current directory to REPO_ROOT
|
||||
os.chdir(str(REPO_ROOT))
|
||||
|
@ -117,8 +124,11 @@ def session_run_always(session, *command, **kwargs):
|
|||
session._runner.global_config.install_only = old_install_only_value
|
||||
|
||||
|
||||
def find_session_runner(session, name, python_version, **kwargs):
|
||||
name += "-{}".format(python_version)
|
||||
def find_session_runner(session, name, python_version, onedir=False, **kwargs):
|
||||
if onedir:
|
||||
name += "-onedir-{}".format(ONEDIR_PYTHON_PATH)
|
||||
else:
|
||||
name += "-{}".format(python_version)
|
||||
for s, _ in session._runner.manifest.list_all_sessions():
|
||||
if name not in s.signatures:
|
||||
continue
|
||||
|
@ -273,7 +283,7 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ
|
|||
session.error("Could not find a linux requirements file for {}".format(pydir))
|
||||
|
||||
|
||||
def _upgrade_pip_setuptools_and_wheel(session, upgrade=True):
|
||||
def _upgrade_pip_setuptools_and_wheel(session, upgrade=True, onedir=False):
|
||||
if SKIP_REQUIREMENTS_INSTALL:
|
||||
session.log(
|
||||
"Skipping Python Requirements because SKIP_REQUIREMENTS_INSTALL was found in the environ"
|
||||
|
@ -289,21 +299,34 @@ def _upgrade_pip_setuptools_and_wheel(session, upgrade=True):
|
|||
]
|
||||
if upgrade:
|
||||
install_command.append("-U")
|
||||
install_command.extend(
|
||||
[
|
||||
"pip>=20.2.4,<21.2",
|
||||
"setuptools!=50.*,!=51.*,!=52.*,<59",
|
||||
if onedir:
|
||||
requirements = [
|
||||
"pip>=22.3.1,<23.0",
|
||||
# https://github.com/pypa/setuptools/commit/137ab9d684075f772c322f455b0dd1f992ddcd8f
|
||||
"setuptools>=65.6.3,<66",
|
||||
"wheel",
|
||||
]
|
||||
)
|
||||
else:
|
||||
requirements = [
|
||||
"pip>=20.2.4,<21.2",
|
||||
"setuptools!=50.*,!=51.*,!=52.*,<59",
|
||||
]
|
||||
install_command.extend(requirements)
|
||||
session_run_always(session, *install_command, silent=PIP_INSTALL_SILENT)
|
||||
return True
|
||||
|
||||
|
||||
def _install_requirements(
|
||||
session, transport, *extra_requirements, requirements_type="ci"
|
||||
session,
|
||||
transport,
|
||||
*extra_requirements,
|
||||
requirements_type="ci",
|
||||
onedir=False,
|
||||
):
|
||||
if not _upgrade_pip_setuptools_and_wheel(session):
|
||||
if onedir and IS_LINUX:
|
||||
session_run_always(session, "python3", "-m", "relenv", "toolchain", "fetch")
|
||||
|
||||
if not _upgrade_pip_setuptools_and_wheel(session, onedir=onedir):
|
||||
return False
|
||||
|
||||
# Install requirements
|
||||
|
@ -1022,9 +1045,12 @@ def _pytest(session, coverage, cmd_args, env=None):
|
|||
session.run("python", "-m", "pytest", *args, env=env)
|
||||
|
||||
|
||||
def _ci_test(session, transport):
|
||||
def _ci_test(session, transport, onedir=False):
|
||||
# Install requirements
|
||||
_install_requirements(session, transport)
|
||||
_install_requirements(session, transport, onedir=onedir)
|
||||
env = {}
|
||||
if onedir:
|
||||
env["ONEDIR_TESTRUN"] = "1"
|
||||
chunks = {
|
||||
"unit": [
|
||||
"tests/unit",
|
||||
|
@ -1086,7 +1112,7 @@ def _ci_test(session, transport):
|
|||
]
|
||||
+ chunk_cmd
|
||||
)
|
||||
_pytest(session, track_code_coverage, pytest_args)
|
||||
_pytest(session, track_code_coverage, pytest_args, env=env)
|
||||
except CommandFailed:
|
||||
if rerun_failures is False:
|
||||
raise
|
||||
|
@ -1106,7 +1132,7 @@ def _ci_test(session, transport):
|
|||
]
|
||||
+ chunk_cmd
|
||||
)
|
||||
_pytest(session, track_code_coverage, pytest_args)
|
||||
_pytest(session, track_code_coverage, pytest_args, env=env)
|
||||
|
||||
|
||||
@nox.session(python=_PYTHON_VERSIONS, name="ci-test")
|
||||
|
@ -1119,6 +1145,38 @@ def ci_test_tcp(session):
|
|||
_ci_test(session, "tcp")
|
||||
|
||||
|
||||
@nox.session(
|
||||
python=str(ONEDIR_PYTHON_PATH),
|
||||
name="ci-test-onedir",
|
||||
venv_params=["--system-site-packages"],
|
||||
)
|
||||
def ci_test_onedir(session):
|
||||
if not ONEDIR_ARTIFACT_PATH.exists():
|
||||
session.error(
|
||||
"The salt onedir artifact, expected to be in '{}', was not found".format(
|
||||
ONEDIR_ARTIFACT_PATH.relative_to(REPO_ROOT)
|
||||
)
|
||||
)
|
||||
|
||||
_ci_test(session, "zeromq", onedir=True)
|
||||
|
||||
|
||||
@nox.session(
|
||||
python=str(ONEDIR_PYTHON_PATH),
|
||||
name="ci-test-onedir-tcp",
|
||||
venv_params=["--system-site-packages"],
|
||||
)
|
||||
def ci_test_onedir_tcp(session):
|
||||
if not ONEDIR_ARTIFACT_PATH.exists():
|
||||
session.error(
|
||||
"The salt onedir artifact, expected to be in '{}', was not found".format(
|
||||
ONEDIR_ARTIFACT_PATH.relative_to(REPO_ROOT)
|
||||
)
|
||||
)
|
||||
|
||||
_ci_test(session, "tcp", onedir=True)
|
||||
|
||||
|
||||
@nox.session(python="3", name="report-coverage")
|
||||
def report_coverage(session):
|
||||
_report_coverage(session)
|
||||
|
@ -1177,7 +1235,7 @@ def compress_dependencies(session):
|
|||
|
||||
|
||||
@nox.session(
|
||||
python="3",
|
||||
python=str(ONEDIR_PYTHON_PATH),
|
||||
name="pre-archive-cleanup",
|
||||
)
|
||||
@nox.parametrize("pkg", [False, True])
|
||||
|
@ -1220,7 +1278,7 @@ def pre_archive_cleanup(session, pkg):
|
|||
session.error("Please install 'pyyaml'.")
|
||||
return
|
||||
|
||||
with open(str(REPO_ROOT / "cicd" / "env-cleanup-files.yml")) as rfh:
|
||||
with open(str(REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")) as rfh:
|
||||
patterns = yaml.safe_load(rfh.read())
|
||||
|
||||
if pkg:
|
||||
|
@ -1578,10 +1636,122 @@ def changelog(session, draft, force):
|
|||
install_command = ["--progress-bar=off", "-r", requirements_file]
|
||||
session.install(*install_command, silent=PIP_INSTALL_SILENT)
|
||||
|
||||
town_cmd = ["towncrier", "--version={}".format(session.posargs[0])]
|
||||
town_cmd = ["towncrier", "build", "--version={}".format(session.posargs[0])]
|
||||
if draft:
|
||||
town_cmd.append("--draft")
|
||||
if force:
|
||||
# Do not ask, just remove news fragments
|
||||
town_cmd.append("--yes")
|
||||
session.run(*town_cmd)
|
||||
|
||||
|
||||
class Recompress:
|
||||
"""
|
||||
Helper class to re-compress a ``.tar.gz`` file to make it reproducible.
|
||||
"""
|
||||
|
||||
def __init__(self, mtime):
|
||||
self.mtime = int(mtime)
|
||||
|
||||
def tar_reset(self, tarinfo):
|
||||
"""
|
||||
Reset user, group, mtime, and mode to create reproducible tar.
|
||||
"""
|
||||
tarinfo.uid = tarinfo.gid = 0
|
||||
tarinfo.uname = tarinfo.gname = "root"
|
||||
tarinfo.mtime = self.mtime
|
||||
if tarinfo.type == tarfile.DIRTYPE:
|
||||
tarinfo.mode = 0o755
|
||||
else:
|
||||
tarinfo.mode = 0o644
|
||||
if tarinfo.pax_headers:
|
||||
raise ValueError(tarinfo.name, tarinfo.pax_headers)
|
||||
return tarinfo
|
||||
|
||||
def recompress(self, targz):
|
||||
"""
|
||||
Re-compress the passed path.
|
||||
"""
|
||||
tempd = pathlib.Path(tempfile.mkdtemp()).resolve()
|
||||
d_src = tempd.joinpath("src")
|
||||
d_src.mkdir()
|
||||
d_tar = tempd.joinpath(targz.stem)
|
||||
d_targz = tempd.joinpath(targz.name)
|
||||
with tarfile.open(d_tar, "w|") as wfile:
|
||||
with tarfile.open(targz, "r:gz") as rfile:
|
||||
rfile.extractall(d_src)
|
||||
extracted_dir = next(pathlib.Path(d_src).iterdir())
|
||||
for name in sorted(extracted_dir.rglob("*")):
|
||||
wfile.add(
|
||||
str(name),
|
||||
filter=self.tar_reset,
|
||||
recursive=False,
|
||||
arcname=str(name.relative_to(d_src)),
|
||||
)
|
||||
|
||||
with open(d_tar, "rb") as rfh:
|
||||
with gzip.GzipFile(
|
||||
fileobj=open(d_targz, "wb"), mode="wb", filename="", mtime=self.mtime
|
||||
) as gz: # pylint: disable=invalid-name
|
||||
while True:
|
||||
chunk = rfh.read(1024)
|
||||
if not chunk:
|
||||
break
|
||||
gz.write(chunk)
|
||||
targz.unlink()
|
||||
shutil.move(str(d_targz), str(targz))
|
||||
|
||||
|
||||
@nox.session(python="3")
|
||||
def build(session):
|
||||
"""
|
||||
Build source and binary distributions based off the current commit author date UNIX timestamp.
|
||||
|
||||
The reason being, reproducible packages.
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
git show -s --format=%at HEAD
|
||||
"""
|
||||
shutil.rmtree("dist/", ignore_errors=True)
|
||||
if SKIP_REQUIREMENTS_INSTALL is False:
|
||||
session.install(
|
||||
"--progress-bar=off",
|
||||
"-r",
|
||||
"requirements/build.txt",
|
||||
silent=PIP_INSTALL_SILENT,
|
||||
)
|
||||
|
||||
timestamp = session.run(
|
||||
"git",
|
||||
"show",
|
||||
"-s",
|
||||
"--format=%at",
|
||||
"HEAD",
|
||||
silent=True,
|
||||
log=False,
|
||||
stderr=None,
|
||||
).strip()
|
||||
env = {"SOURCE_DATE_EPOCH": str(timestamp)}
|
||||
session.run(
|
||||
"python",
|
||||
"-m",
|
||||
"build",
|
||||
"--sdist",
|
||||
str(REPO_ROOT),
|
||||
env=env,
|
||||
)
|
||||
# Recreate sdist to be reproducible
|
||||
recompress = Recompress(timestamp)
|
||||
for targz in REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
|
||||
session.log("Re-compressing %s...", targz.relative_to(REPO_ROOT))
|
||||
recompress.recompress(targz)
|
||||
|
||||
sha256sum = shutil.which("sha256sum")
|
||||
if sha256sum:
|
||||
packages = [
|
||||
str(pkg.relative_to(REPO_ROOT))
|
||||
for pkg in REPO_ROOT.joinpath("dist").iterdir()
|
||||
]
|
||||
session.run("sha256sum", *packages, external=True)
|
||||
session.run("python", "-m", "twine", "check", "dist/*")
|
||||
|
|
|
@ -2,6 +2,10 @@
|
|||
common:
dir_patterns: &common_dir_patterns
- "**/__pycache__"
- "**/lib/python3.*/test"
- "**/lib/python3.*/idlelib"
- "**/lib/python3.*/tkinter"
- "**/lib/python3.*/turtledemo"
- "**/site-packages/test"
- "**/site-packages/tests"
- "**/site-packages/*/test"
@ -44,6 +48,7 @@ ci:
- *common_file_patterns
# Help files
- "**/*.chm"
- "**/Scripts/wmitest*"
# Non Windows execution modules
- "**/site-packages/salt/modules/aacme.py*"
- "**/site-packages/salt/modules/aix.py*"
@ -248,6 +253,7 @@ pkg:
dir_patterns:
- *ci_windows_dir_patterns
- "**/salt/share"
- "**/site-packages/pywin32_system32"
file_patterns:
- *ci_windows_file_patterns
- "**/Scripts/py.exe"
|
|
@ -1,4 +1,4 @@
|
|||
salt (3006.0) stable; urgency=medium
salt (SALT_RELEASE_VERSION) stable; urgency=medium

* Build of Salt with Relenv
|
||||
|
||||
|
|
|
@ -13,20 +13,44 @@ override_dh_auto_clean:
|
|||
rm -rf debian/salt-syndic
|
||||
rm -rf debian/salt-ssh
|
||||
|
||||
ifeq ("${SALT_ONEDIR_ARCHIVE}", "")
|
||||
override_dh_auto_build:
|
||||
mkdir build
|
||||
python3 -m virtualenv --python=python3 build/venv
|
||||
build/venv/bin/pip3 install relenv
|
||||
build/venv/bin/relenv fetch
|
||||
build/venv/bin/relenv toolchain fetch
|
||||
build/venv/bin/relenv create build/salt
|
||||
RELENV_PIP_DIR=yes build/salt/bin/pip3 install /salt
|
||||
mkdir -p build/onedir
|
||||
python3 -m venv --clear --copies build/onedir/venv
|
||||
build/onedir/venv/bin/python3 -m pip install relenv
|
||||
build/onedir/venv/bin/relenv fetch
|
||||
build/onedir/venv/bin/relenv toolchain fetch
|
||||
build/onedir/venv/bin/relenv create build/onedir/salt
|
||||
build/onedir/salt/bin/python3 -m pip install "pip>=22.3.1,<23.0" "setuptools>=65.6.3,<66" "wheel"
|
||||
export PY=$$(build/onedir/salt/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') \
|
||||
&& build/onedir/salt/bin/python3 -m pip install -r requirements/static/pkg/py$${PY}/linux.txt
|
||||
|
||||
# Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the <onedir>/bin directory
|
||||
find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \;
|
||||
|
||||
export USE_STATIC_REQUIREMENTS=1 \
|
||||
&& export RELENV_PIP_DIR=1 \
|
||||
&& build/onedir/salt/bin/python3 -m pip install --no-warn-script-location .
|
||||
build/onedir/salt/bin/python3 -m venv --clear --copies build/onedir/tools
|
||||
export PY=$$(build/onedir/tools/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') \
|
||||
&& build/onedir/tools/bin/python3 -m pip install -r requirements/static/ci/py$${PY}/tools.txt
|
||||
build/onedir/tools/bin/tools pkg pre-archive-cleanup --pkg build/onedir/salt
|
||||
else
|
||||
override_dh_auto_build:
|
||||
# The relenv onedir is being provided, all setup up until Salt is installed
|
||||
# is expected to be done
|
||||
mkdir -p build/onedir
|
||||
cd build/onedir; tar xvf ${SALT_ONEDIR_ARCHIVE}
|
||||
|
||||
# Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the <onedir>/bin directory
|
||||
find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \;
|
||||
endif
|
||||
|
||||
# dh_auto_install tries to invoke distutils causing failures.
|
||||
override_dh_auto_install:
|
||||
|
||||
|
||||
override_dh_install:
|
||||
mkdir -p debian/salt-common/usr/opt/saltstack
|
||||
cp -R build/salt debian/salt-common/usr/opt/saltstack/
|
||||
mkdir -p debian/salt-common/opt/saltstack
|
||||
cp -R build/onedir/salt debian/salt-common/opt/saltstack/
|
||||
dh_install
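(The `find ... -exec sed` lines above rewrite hardcoded shebangs such as `#!/<build path>/salt/bin/python3` into a relocatable trampoline so the onedir scripts keep working after they are moved under /opt/saltstack. A hedged Python sketch of the same rewrite; the sample path is made up:)

import re
from pathlib import Path

TRAMPOLINE = '#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@"'

def fix_shebang(script):
    # Swap an absolute .../salt/bin/python3 shebang for a shell trampoline that
    # finds python3 next to the script at run time.
    text = Path(script).read_text()
    fixed = re.sub(r"^#!/.*salt/bin/python3", TRAMPOLINE, text, count=1, flags=re.M)
    Path(script).write_text(fixed)

fix_shebang("build/onedir/salt/bin/salt-call")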
|
||||
|
|
|
@ -1 +1 @@
|
|||
usr/opt/saltstack/salt/salt-api /usr/bin/salt-api
|
||||
opt/saltstack/salt/salt-api /usr/bin/salt-api
|
|
@ -1 +1 @@
|
|||
usr/opt/saltstack/salt/salt-cloud /usr/bin/salt-cloud
|
||||
opt/saltstack/salt/salt-cloud /usr/bin/salt-cloud
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
usr/opt/saltstack/salt/spm /usr/bin/spm
|
||||
usr/opt/saltstack/salt/salt-pip /usr/bin/salt-pip
|
||||
opt/saltstack/salt/spm /usr/bin/spm
|
||||
opt/saltstack/salt/salt-pip /usr/bin/salt-pip
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
usr/opt/saltstack/salt/salt-master /usr/bin/salt-master
|
||||
usr/opt/saltstack/salt/salt usr/bin/salt
|
||||
usr/opt/saltstack/salt/salt-cp usr/bin/salt-cp
|
||||
usr/opt/saltstack/salt/salt-key usr/bin/salt-key
|
||||
usr/opt/saltstack/salt/salt-run usr/bin/salt-run
|
||||
opt/saltstack/salt/salt-master /usr/bin/salt-master
|
||||
opt/saltstack/salt/salt usr/bin/salt
|
||||
opt/saltstack/salt/salt-cp usr/bin/salt-cp
|
||||
opt/saltstack/salt/salt-key usr/bin/salt-key
|
||||
opt/saltstack/salt/salt-run usr/bin/salt-run
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
usr/opt/saltstack/salt/salt-minion /usr/bin/salt-minion
|
||||
usr/opt/saltstack/salt/salt-proxy /usr/bin/salt-proxy
|
||||
usr/opt/saltstack/salt/salt-call /usr/bin/salt-call
|
||||
opt/saltstack/salt/salt-minion /usr/bin/salt-minion
|
||||
opt/saltstack/salt/salt-proxy /usr/bin/salt-proxy
|
||||
opt/saltstack/salt/salt-call /usr/bin/salt-call
|
||||
|
|
|
@ -1 +1 @@
|
|||
usr/opt/saltstack/salt/salt-ssh /usr/bin/salt-ssh
|
||||
opt/saltstack/salt/salt-ssh /usr/bin/salt-ssh
|
||||
|
|
|
@ -1 +1 @@
|
|||
usr/opt/saltstack/salt/salt-syndic /usr/bin/salt-syndic
|
||||
opt/saltstack/salt/salt-syndic /usr/bin/salt-syndic
|
||||
|
|
|
@ -102,8 +102,8 @@
|
|||
#-------------------------------------------------------------------------------
|
||||
# Variables
|
||||
#-------------------------------------------------------------------------------
|
||||
SRC_DIR=$(git rev-parse --show-toplevel)
|
||||
SCRIPT_DIR="$SRC_DIR/pkg/osx"
|
||||
SRC_DIR="$(git rev-parse --show-toplevel)"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
CPU_ARCH=$(uname -m)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -209,7 +209,11 @@ printf "v%.0s" {1..80}; printf "\n"
|
|||
#-------------------------------------------------------------------------------
|
||||
# Build and Sign Package
|
||||
#-------------------------------------------------------------------------------
|
||||
"$SCRIPT_DIR/package.sh" "$VERSION"
|
||||
if [ "$(id -un)" != "root" ]; then
|
||||
sudo "$SCRIPT_DIR/package.sh" "$VERSION"
|
||||
else
|
||||
"$SCRIPT_DIR/package.sh" "$VERSION"
|
||||
fi
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Notarize Package
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
# Variables
|
||||
#-------------------------------------------------------------------------------
|
||||
SRC_DIR="$(git rev-parse --show-toplevel)"
|
||||
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REMOVE_DIRS=(
|
||||
"$SCRIPT_DIR/relative-environment-for-python"
|
||||
"$HOME/.local/relenv"
|
||||
|
|
|
@ -44,25 +44,17 @@
|
|||
# export DEV_INSTALL_CERT="Developer ID Installer: Salt Stack, Inc. (AB123ABCD1)"
|
||||
#
|
||||
################################################################################
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Variables
|
||||
#-------------------------------------------------------------------------------
|
||||
# Get/Set Version
|
||||
if [ "$1" == "" ]; then
|
||||
VERSION=$(git describe)
|
||||
else
|
||||
VERSION=$1
|
||||
fi
|
||||
|
||||
# Strip the v from the beginning
|
||||
VERSION=${VERSION#"v"}
|
||||
|
||||
CPU_ARCH="$(uname -m)"
|
||||
SRC_DIR="$(git rev-parse --show-toplevel)"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
DIST_XML="$SCRIPT_DIR/distribution.xml"
|
||||
BUILD_DIR="$SCRIPT_DIR/build"
|
||||
PKG_RESOURCES=$SRC_DIR/pkg/osx
|
||||
CMD_OUTPUT=$(mktemp -t cmd.log)
|
||||
CMD_OUTPUT=$(mktemp -t cmd_log.XXX)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Functions
|
||||
|
@ -79,6 +71,7 @@ _usage() {
|
|||
echo ""
|
||||
echo " -h, --help this message"
|
||||
echo " -v, --version version of Salt display in the package"
|
||||
echo " -n, --nightly don't sign the package"
|
||||
echo ""
|
||||
echo " To build the Salt package:"
|
||||
echo " example: $0 3006.1-1"
|
||||
|
@ -119,18 +112,33 @@ while true; do
|
|||
_usage
|
||||
exit 0
|
||||
;;
|
||||
-*)
|
||||
-n | --nightly )
|
||||
NIGHTLY=1
|
||||
shift
|
||||
;;
|
||||
-v | --version )
|
||||
shift
|
||||
VERSION="$1"
|
||||
shift
|
||||
;;
|
||||
-* )
|
||||
echo "Invalid Option: $1"
|
||||
echo ""
|
||||
_usage
|
||||
exit 1
|
||||
;;
|
||||
* )
|
||||
VERSION="$1"
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -z "$VERSION" ]; then
|
||||
VERSION=$(git describe)
|
||||
fi
|
||||
VERSION=${VERSION#"v"}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Delete temporary files on exit
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -152,18 +160,17 @@ printf -- "-%.0s" {1..80}; printf "\n"
|
|||
if [[ ! -e "$SRC_DIR/.git" ]] && [[ ! -e "$SRC_DIR/scripts/salt" ]]; then
|
||||
echo "This directory doesn't appear to be a git repository."
|
||||
echo "The macOS build process needs some files from a Git checkout of Salt."
|
||||
echo "Run this script from the 'pkg/osx' directory of the Git checkout."
|
||||
echo "Run this script from the 'pkg/macos' directory of the Git checkout."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Add Title, Description, Version and CPU Arch to distribution.xml
|
||||
#-------------------------------------------------------------------------------
|
||||
DIST="$PKG_RESOURCES/distribution.xml"
|
||||
if [ -f "$DIST" ]; then
|
||||
if [ -f "$DIST_XML" ]; then
|
||||
_msg "Removing existing distribution.xml"
|
||||
rm -f "$DIST"
|
||||
if ! [ -f "$DIST" ]; then
|
||||
rm -f "$DIST_XML"
|
||||
if ! [ -f "$DIST_XML" ]; then
|
||||
_success
|
||||
else
|
||||
_failure
|
||||
|
@ -171,11 +178,11 @@ if [ -f "$DIST" ]; then
|
|||
fi
|
||||
|
||||
_msg "Creating distribution.xml"
|
||||
cp "$PKG_RESOURCES/distribution.xml.dist" "$DIST"
|
||||
if [ -f "$DIST" ]; then
|
||||
cp "$SCRIPT_DIR/distribution.xml.dist" "$DIST_XML"
|
||||
if [ -f "$DIST_XML" ]; then
|
||||
_success
|
||||
else
|
||||
CMD_OUTPUT="Failed to copy: $DIST"
|
||||
CMD_OUTPUT="Failed to copy: $DIST_XML"
|
||||
_failure
|
||||
fi
|
||||
|
||||
|
@ -183,8 +190,8 @@ fi
|
|||
# be able to check it
|
||||
_msg "Setting package version"
|
||||
SED_STR="s/@VERSION@/$VERSION/g"
|
||||
sed -i "" "$SED_STR" "$DIST"
|
||||
if grep -q "$VERSION" "$DIST"; then
|
||||
sed -i "" "$SED_STR" "$DIST_XML"
|
||||
if grep -q "$VERSION" "$DIST_XML"; then
|
||||
_success
|
||||
else
|
||||
CMD_OUTPUT="Failed to set: $VERSION"
|
||||
|
@ -194,8 +201,8 @@ fi
|
|||
_msg "Setting package title"
|
||||
TITLE="Salt $VERSION (Python 3)"
|
||||
SED_STR="s/@TITLE@/$TITLE/g"
|
||||
sed -i "" "$SED_STR" "$DIST"
|
||||
if grep -q "$TITLE" "$DIST"; then
|
||||
sed -i "" "$SED_STR" "$DIST_XML"
|
||||
if grep -q "$TITLE" "$DIST_XML"; then
|
||||
_success
|
||||
else
|
||||
CMD_OUTPUT="Failed to set: $TITLE"
|
||||
|
@ -205,8 +212,8 @@ fi
|
|||
_msg "Setting package description"
|
||||
DESC="Salt $VERSION with Python 3"
|
||||
SED_STR="s/@DESC@/$DESC/g"
|
||||
sed -i "" "$SED_STR" "$DIST"
|
||||
if grep -q "$DESC" "$DIST"; then
|
||||
sed -i "" "$SED_STR" "$DIST_XML"
|
||||
if grep -q "$DESC" "$DIST_XML"; then
|
||||
_success
|
||||
else
|
||||
CMD_OUTPUT="Failed to set: $DESC"
|
||||
|
@ -215,8 +222,8 @@ fi
|
|||
|
||||
_msg "Setting package architecture"
|
||||
SED_STR="s/@CPU_ARCH@/$CPU_ARCH/g"
|
||||
sed -i "" "$SED_STR" "$DIST"
|
||||
if grep -q "$CPU_ARCH" "$DIST"; then
|
||||
sed -i "" "$SED_STR" "$DIST_XML"
|
||||
if grep -q "$CPU_ARCH" "$DIST_XML"; then
|
||||
_success
|
||||
else
|
||||
CMD_OUTPUT="Failed to set: $CPU_ARCH"
|
||||
|
@ -229,9 +236,9 @@ fi
|
|||
|
||||
_msg "Building the source package"
|
||||
# Build the src package
|
||||
FILE="salt-src-$VERSION-py3-$CPU_ARCH.pkg"
|
||||
FILE="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg"
|
||||
if pkgbuild --root="$BUILD_DIR" \
|
||||
--scripts=pkg-scripts \
|
||||
--scripts="$SCRIPT_DIR/pkg-scripts" \
|
||||
--identifier=com.saltstack.salt \
|
||||
--version="$VERSION" \
|
||||
--ownership=recommended \
|
||||
|
@ -242,22 +249,38 @@ else
|
|||
fi
|
||||
|
||||
|
||||
_msg "Building the product package (signed)"
|
||||
FILE="salt-$VERSION-py3-$CPU_ARCH-signed.pkg"
|
||||
if productbuild --resources=pkg-resources \
|
||||
--distribution=distribution.xml \
|
||||
--package-path="salt-src-$VERSION-py3-$CPU_ARCH.pkg" \
|
||||
--version="$VERSION" \
|
||||
--sign "$DEV_INSTALL_CERT" \
|
||||
--timestamp \
|
||||
"$FILE" > "$CMD_OUTPUT" 2>&1; then
|
||||
_success
|
||||
if [ -z "${NIGHTLY}" ]; then
|
||||
_msg "Building the product package (signed)"
|
||||
# This is not a nightly build, so we want to sign it
|
||||
FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-signed.pkg"
|
||||
if productbuild --resources="$SCRIPT_DIR/pkg-resources" \
|
||||
--distribution="$DIST_XML" \
|
||||
--package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \
|
||||
--version="$VERSION" \
|
||||
--sign "$DEV_INSTALL_CERT" \
|
||||
--timestamp \
|
||||
"$FILE" > "$CMD_OUTPUT" 2>&1; then
|
||||
_success
|
||||
else
|
||||
_failure
|
||||
fi
|
||||
else
|
||||
_failure
|
||||
_msg "Building the product package (unsigned)"
|
||||
# This is a nightly build, so we don't sign it
|
||||
FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-unsigned.pkg"
|
||||
if productbuild --resources="$SCRIPT_DIR/pkg-resources" \
|
||||
--distribution="$DIST_XML" \
|
||||
--package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \
|
||||
--version="$VERSION" \
|
||||
"$FILE" > "$CMD_OUTPUT" 2>&1; then
|
||||
_success
|
||||
else
|
||||
_failure
|
||||
fi
|
||||
fi
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Start
|
||||
# Script Completed
|
||||
#-------------------------------------------------------------------------------
|
||||
printf -- "-%.0s" {1..80}; printf "\n"
|
||||
echo "Building Salt Package Completed"
|
||||
|
|
43
pkg/macos/prep_salt.sh
Normal file → Executable file
43
pkg/macos/prep_salt.sh
Normal file → Executable file
|
@ -20,19 +20,11 @@
|
|||
# ./prep_salt.sh
|
||||
#
|
||||
################################################################################
|
||||
#-------------------------------------------------------------------------------
|
||||
# Variables
|
||||
#-------------------------------------------------------------------------------
|
||||
SRC_DIR="$(git rev-parse --show-toplevel)"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
BUILD_DIR="$SCRIPT_DIR/build"
|
||||
CONF_DIR="$BUILD_DIR/etc/salt"
|
||||
PKG_RESOURCES=$SRC_DIR/pkg/osx
|
||||
CMD_OUTPUT=$(mktemp -t cmd.log)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Functions
|
||||
# Script Functions
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# _usage
|
||||
#
|
||||
# Prints out help text
|
||||
|
@ -43,7 +35,8 @@ _usage() {
|
|||
echo "usage: ${0}"
|
||||
echo " [-h|--help]"
|
||||
echo ""
|
||||
echo " -h, --help this message"
|
||||
echo " -h, --help this message"
|
||||
echo " -b, --build-dir the location of the build directory"
|
||||
echo ""
|
||||
echo " To build the Salt package:"
|
||||
echo " example: $0"
|
||||
|
@ -84,7 +77,12 @@ while true; do
|
|||
_usage
|
||||
exit 0
|
||||
;;
|
||||
-*)
|
||||
-b | --build-dir )
|
||||
shift
|
||||
BUILD_DIR="$*"
|
||||
shift
|
||||
;;
|
||||
-* )
|
||||
echo "Invalid Option: $1"
|
||||
echo ""
|
||||
_usage
|
||||
|
@ -96,6 +94,17 @@ while true; do
|
|||
esac
|
||||
done
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Variables
|
||||
#-------------------------------------------------------------------------------
|
||||
SRC_DIR="$(git rev-parse --show-toplevel)"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
if [ -z "$BUILD_DIR" ]; then
|
||||
BUILD_DIR="$SCRIPT_DIR/build"
|
||||
fi
|
||||
CONF_DIR="$BUILD_DIR/etc/salt"
|
||||
CMD_OUTPUT=$(mktemp -t cmd.log)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Delete temporary files on exit
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -117,7 +126,7 @@ printf -- "-%.0s" {1..80}; printf "\n"
|
|||
if [[ ! -e "$SRC_DIR/.git" ]] && [[ ! -e "$SRC_DIR/scripts/salt" ]]; then
|
||||
echo "This directory doesn't appear to be a git repository."
|
||||
echo "The macOS build process needs some files from a Git checkout of Salt."
|
||||
echo "Run this script from the 'pkg/osx' directory of the Git checkout."
|
||||
echo "Run this script from the 'pkg/macos' directory of the Git checkout."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
@ -125,9 +134,13 @@ fi
|
|||
# Copy salt-config from Salt Repo to /opt/salt
|
||||
#-------------------------------------------------------------------------------
|
||||
SALT_DIR="$BUILD_DIR/opt/salt"
|
||||
if ! [ -d "$SALT_DIR" ]; then
|
||||
# We only need this for relenv builds
|
||||
mkdir -p "$SALT_DIR"
|
||||
fi
|
||||
if ! [ -f "$SALT_DIR/salt-config.sh" ]; then
|
||||
_msg "Staging Salt config script"
|
||||
cp "$PKG_RESOURCES/scripts/salt-config.sh" "$SALT_DIR/"
|
||||
cp "$SCRIPT_DIR/scripts/salt-config.sh" "$SALT_DIR/"
|
||||
if [ -f "$SALT_DIR/salt-config.sh" ]; then
|
||||
_success
|
||||
else
|
||||
|
@ -158,7 +171,7 @@ for i in "${ITEMS[@]}"; do
|
|||
FILE="$BUILD_DIR/Library/LaunchDaemons/com.saltstack.salt.$i.plist"
|
||||
if ! [ -f "$FILE" ]; then
|
||||
_msg "Copying $i service definition"
|
||||
cp "$PKG_RESOURCES/scripts/com.saltstack.salt.$i.plist" "$FILE"
|
||||
cp "$SCRIPT_DIR/scripts/com.saltstack.salt.$i.plist" "$FILE"
|
||||
if [ -f "$FILE" ]; then
|
||||
_success
|
||||
else
|
||||
|
|
|
@ -44,9 +44,8 @@
|
|||
#-------------------------------------------------------------------------------
|
||||
# Variables
|
||||
#-------------------------------------------------------------------------------
|
||||
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
BUILD_DIR="$SCRIPT_DIR/build"
|
||||
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
|
||||
CMD_OUTPUT=$(mktemp -t cmd.log)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -28,7 +28,6 @@ URL: https://saltproject.io/
|
|||
|
||||
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
|
||||
BuildArch: x86_64
|
||||
|
||||
%ifarch %{ix86} x86_64
|
||||
Requires: dmidecode
|
||||
|
@ -50,7 +49,9 @@ BuildRequires: python3
|
|||
BuildRequires: python3-pip
|
||||
BuildRequires: openssl
|
||||
BuildRequires: git
|
||||
%if %{rhel} >= 9
|
||||
BuildRequires: libxcrypt-compat
|
||||
%endif
|
||||
|
||||
%description
|
||||
Salt is a distributed remote execution system used to execute commands and
|
||||
|
@ -128,25 +129,48 @@ unset CXXFLAGS
|
|||
unset CFLAGS
|
||||
unset LDFLAGS
|
||||
rm -rf $RPM_BUILD_DIR
|
||||
mkdir -p $RPM_BUILD_DIR/opt/saltstack
|
||||
mkdir -p $RPM_BUILD_DIR/usr/bin
|
||||
mkdir -p $RPM_BUILD_DIR/build
|
||||
cd $RPM_BUILD_DIR
|
||||
python3 -m pip install relenv
|
||||
relenv fetch
|
||||
relenv toolchain fetch
|
||||
relenv create $RPM_BUILD_DIR/opt/saltstack/salt
|
||||
env RELENV_PIP_DIR=yes $RPM_BUILD_DIR/opt/saltstack/salt/bin/pip3 install --no-cache -v %{_salt_src}
|
||||
# jmespath doesn't use pip scripts
|
||||
rm $RPM_BUILD_DIR/opt/saltstack/salt/jp.py
|
||||
|
||||
%if "%{getenv:SALT_ONEDIR_ARCHIVE}" == ""
|
||||
python3 -m venv --clear --copies build/venv
|
||||
build/venv/bin/python3 -m pip install relenv
|
||||
build/venv/bin/relenv fetch
|
||||
build/venv/bin/relenv toolchain fetch
|
||||
build/venv/bin/relenv create build/salt
|
||||
build/salt/bin/python3 -m pip install "pip>=22.3.1,<23.0" "setuptools>=65.6.3,<66" "wheel"
|
||||
export PY=$(build/salt/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()')
|
||||
build/salt/bin/python3 -m pip install -r %{_salt_src}/requirements/static/pkg/py${PY}/linux.txt
|
||||
|
||||
# Fix any hardcoded paths to the relenv python binary on any of the scripts installed in
|
||||
# the <onedir>/bin directory
|
||||
find build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \;
|
||||
|
||||
export USE_STATIC_REQUIREMENTS=1
|
||||
export RELENV_PIP_DIR=1
|
||||
build/salt/bin/python3 -m pip install --no-warn-script-location %{_salt_src}
|
||||
|
||||
build/salt/bin/python3 -m venv --clear --copies build/tools
|
||||
build/tools/bin/python3 -m pip install -r %{_salt_src}/requirements/static/ci/py${PY}/tools.txt
|
||||
cd %{_salt_src}
|
||||
$RPM_BUILD_DIR/build/tools/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt
|
||||
%else
|
||||
# The relenv onedir is being provided; all setup prior to installing Salt
|
||||
# is expected to already be done
|
||||
cd build
|
||||
tar xf ${SALT_ONEDIR_ARCHIVE}
|
||||
|
||||
# Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the <onedir>/bin directory
|
||||
find salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \;
|
||||
|
||||
cd $RPM_BUILD_DIR
|
||||
%endif
|
||||
|
||||
|
||||
%install
|
||||
rm -rf %{buildroot}
|
||||
mkdir -p %{buildroot}/opt/saltstack
|
||||
cp -R $RPM_BUILD_DIR/* %{buildroot}/
|
||||
mkdir -p %{buildroot}/opt/saltstack/salt
|
||||
# pip installs directory
|
||||
mkdir -p %{buildroot}/opt/saltstack/salt/pypath/
|
||||
cp -R $RPM_BUILD_DIR/build/salt %{buildroot}/opt/saltstack/
|
||||
|
||||
# Add some directories
|
||||
install -d -m 0755 %{buildroot}%{_var}/log/salt
|
||||
|
@ -295,6 +319,9 @@ rm -rf %{buildroot}
|
|||
ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm
|
||||
ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip
|
||||
|
||||
%post cloud
|
||||
ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud
|
||||
|
||||
%post master
|
||||
%systemd_post salt-master.service
|
||||
ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt
|
||||
|
@ -331,6 +358,9 @@ if [ $1 -lt 2 ]; then
|
|||
fi
|
||||
fi
|
||||
|
||||
%post ssh
|
||||
ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh
|
||||
|
||||
%post api
|
||||
%systemd_post salt-api.service
|
||||
ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api
|
||||
|
|
|
@ -30,7 +30,7 @@ param(
|
|||
[String] $Version,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[ValidateSet("x86", "x64")]
|
||||
[ValidateSet("x86", "x64", "amd64")]
|
||||
[Alias("a")]
|
||||
# The System Architecture to build. "x86" will build a 32-bit installer.
|
||||
# "x64" will build a 64-bit installer. Default is: x64
|
||||
|
@ -66,11 +66,23 @@ param(
|
|||
[Alias("b")]
|
||||
# Build python from source instead of fetching a tarball
|
||||
# Requires VC Build Tools
|
||||
[Switch] $Build
|
||||
[Switch] $Build,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
# Don't install. It should already be installed
|
||||
[Switch] $SkipInstall
|
||||
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
|
@ -80,6 +92,10 @@ $ErrorActionPreference = "Stop"
|
|||
$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
|
||||
$PROJECT_DIR = $(git rev-parse --show-toplevel)
|
||||
|
||||
if ( $Architecture -eq "amd64" ) {
|
||||
$Architecture = "x64"
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Verify Salt and Version
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -114,7 +130,11 @@ Write-Host $("v" * 80)
|
|||
# Install NSIS
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
& "$SCRIPT_DIR\install_nsis.ps1"
|
||||
$KeywordArguments = @{}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
& "$SCRIPT_DIR\install_nsis.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to install NSIS"
|
||||
exit 1
|
||||
|
@ -124,7 +144,11 @@ if ( ! $? ) {
|
|||
# Install WIX
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
& "$SCRIPT_DIR\install_wix.ps1"
|
||||
$KeywordArguments = @{}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
& "$SCRIPT_DIR\install_wix.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to install WIX"
|
||||
exit 1
|
||||
|
@ -134,34 +158,53 @@ if ( ! $? ) {
|
|||
# Install Visual Studio Build Tools
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
& "$SCRIPT_DIR\install_vs_buildtools.ps1"
|
||||
$KeywordArguments = @{}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
& "$SCRIPT_DIR\install_vs_buildtools.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to install Visual Studio Build Tools"
|
||||
exit 1
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Build Python
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
$KeywordArguments = @{
|
||||
Version = $PythonVersion
|
||||
Architecture = $Architecture
|
||||
}
|
||||
if ( $Build ) {
|
||||
$KeywordArguments["Build"] = $true
|
||||
}
|
||||
& "$SCRIPT_DIR\build_python.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to build Python"
|
||||
exit 1
|
||||
if ( ! $SkipInstall ) {
|
||||
#-------------------------------------------------------------------------------
|
||||
# Build Python
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
$KeywordArguments = @{
|
||||
Version = $PythonVersion
|
||||
Architecture = $Architecture
|
||||
}
|
||||
if ( $Build ) {
|
||||
$KeywordArguments["Build"] = $true
|
||||
}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
|
||||
& "$SCRIPT_DIR\build_python.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to build Python"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Install Salt
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
& "$SCRIPT_DIR\install_salt.ps1"
|
||||
$KeywordArguments = @{}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
if ( $SkipInstall ) {
|
||||
$KeywordArguments["SkipInstall"] = $true
|
||||
}
|
||||
$KeywordArguments["PKG"] = $true
|
||||
& "$SCRIPT_DIR\install_salt.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to install Salt"
|
||||
exit 1
|
||||
|
@ -171,7 +214,12 @@ if ( ! $? ) {
|
|||
# Prep Salt for Packaging
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
& "$SCRIPT_DIR\prep_salt.ps1"
|
||||
$KeywordArguments = @{}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
$KeywordArguments["PKG"] = $true
|
||||
& "$SCRIPT_DIR\prep_salt.ps1" @KeywordArguments
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to Prepare Salt for packaging"
|
||||
exit 1
|
||||
|
@ -185,8 +233,11 @@ $KeywordArguments = @{}
|
|||
if ( ! [String]::IsNullOrEmpty($Version) ) {
|
||||
$KeywordArguments.Add("Version", $Version)
|
||||
}
|
||||
if ( $CICD ) {
|
||||
$KeywordArguments["CICD"] = $true
|
||||
}
|
||||
|
||||
powershell -file "$SCRIPT_DIR\nsis\build_pkg.ps1" @KeywordArguments
|
||||
& "$SCRIPT_DIR\nsis\build_pkg.ps1" @KeywordArguments
|
||||
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to build NSIS package"
|
||||
|
@ -197,7 +248,7 @@ if ( ! $? ) {
|
|||
# Build MSI Package
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
powershell -file "$SCRIPT_DIR\msi\build_pkg.ps1" @KeywordArguments
|
||||
& "$SCRIPT_DIR\msi\build_pkg.ps1" @KeywordArguments
|
||||
|
||||
if ( ! $? ) {
|
||||
Write-Host "Failed to build NSIS package"
|
||||
|
|
|
@ -41,7 +41,7 @@ param(
|
|||
[String] $Version = "3.8.16",
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[ValidateSet("x64", "x86")]
|
||||
[ValidateSet("x64", "x86", "amd64")]
|
||||
[Alias("a")]
|
||||
# The System Architecture to build. "x86" will build a 32-bit installer.
|
||||
# "x64" will build a 64-bit installer. Default is: x64
|
||||
|
@ -51,7 +51,12 @@ param(
|
|||
[Alias("b")]
|
||||
# Build python from source instead of fetching a tarball
|
||||
# Requires VC Build Tools
|
||||
[Switch] $Build
|
||||
[Switch] $Build,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
|
||||
)
|
||||
|
||||
|
@ -63,14 +68,21 @@ param(
|
|||
$ProgressPreference = "SilentlyContinue"
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
if ( $Architecture -eq "amd64" ) {
|
||||
$Architecture = "x64"
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Functions
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Start the Script
|
||||
|
|
|
@ -20,8 +20,17 @@ build.ps1
|
|||
build.ps1 -Version 3005 -PythonVersion 3.8.13
|
||||
|
||||
#>
|
||||
param(
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
|
@ -37,8 +46,12 @@ $RELENV_DIR = "${env:LOCALAPPDATA}\relenv"
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -10,6 +10,12 @@ required to build the Salt installer
|
|||
install_nsis.ps1
|
||||
|
||||
#>
|
||||
param(
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
|
@ -24,8 +30,12 @@ $ErrorActionPreference = "Stop"
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -13,6 +13,29 @@ ready to be packaged.
|
|||
install_salt.ps1
|
||||
|
||||
#>
|
||||
param(
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("b")]
|
||||
# The build directory to use instead of the default 'buildenv' directory
|
||||
[String] $BuildDir,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
# Don't install. It should already be installed
|
||||
[Switch] $SkipInstall,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
# Path to a Salt source tarball which be used to install Salt.
|
||||
[String] $SourceTarball,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
# When true, additional routines are done to prepare for packaging.
|
||||
[Switch] $PKG
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
|
@ -27,8 +50,12 @@ $ErrorActionPreference = "Stop"
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -37,7 +64,11 @@ function Write-Result($result, $ForegroundColor="Green") {
|
|||
|
||||
# Python Variables
|
||||
$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
|
||||
$BUILD_DIR = "$SCRIPT_DIR\buildenv"
|
||||
if ( $BuildDir ) {
|
||||
$BUILD_DIR = $BuildDir
|
||||
} else {
|
||||
$BUILD_DIR = "$SCRIPT_DIR\buildenv"
|
||||
}
|
||||
$SITE_PKGS_DIR = "$BUILD_DIR\Lib\site-packages"
|
||||
$SCRIPTS_DIR = "$BUILD_DIR\Scripts"
|
||||
$PYTHON_BIN = "$SCRIPTS_DIR\python.exe"
|
||||
|
@ -54,55 +85,57 @@ if ( $ARCH -eq "64bit" ) {
|
|||
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/32"
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Start the Script
|
||||
#-------------------------------------------------------------------------------
|
||||
Write-Host $("=" * 80)
|
||||
Write-Host "Install Salt into Python Environment" -ForegroundColor Cyan
|
||||
Write-Host "- Architecture: $ARCH"
|
||||
Write-Host $("-" * 80)
|
||||
if ( ! $SkipInstall ) {
|
||||
#-------------------------------------------------------------------------------
|
||||
# Start the Script
|
||||
#-------------------------------------------------------------------------------
|
||||
Write-Host $("=" * 80)
|
||||
Write-Host "Install Salt into Python Environment" -ForegroundColor Cyan
|
||||
Write-Host "- Architecture: $ARCH"
|
||||
Write-Host $("-" * 80)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Installing Salt
|
||||
#-------------------------------------------------------------------------------
|
||||
# We don't want to use an existing salt installation because we don't know what
|
||||
# it is
|
||||
Write-Host "Checking for existing Salt installation: " -NoNewline
|
||||
if ( ! (Test-Path -Path "$SCRIPTS_DIR\salt-minion.exe") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
#-------------------------------------------------------------------------------
|
||||
# Installing Salt
|
||||
#-------------------------------------------------------------------------------
|
||||
# We don't want to use an existing salt installation because we don't know what
|
||||
# it is
|
||||
Write-Host "Checking for existing Salt installation: " -NoNewline
|
||||
if ( ! (Test-Path -Path "$SCRIPTS_DIR\salt-minion.exe") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Cleaning previous builds
|
||||
$remove = "build", "dist"
|
||||
$remove | ForEach-Object {
|
||||
if ( Test-Path -Path "$PROJECT_DIR\$_" ) {
|
||||
Write-Host "Removing $_`:" -NoNewline
|
||||
Remove-Item -Path "$PROJECT_DIR\$_" -Recurse -Force
|
||||
if ( ! (Test-Path -Path "$PROJECT_DIR\$_") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
# Cleaning previous builds
|
||||
$remove = "build", "dist"
|
||||
$remove | ForEach-Object {
|
||||
if ( Test-Path -Path "$PROJECT_DIR\$_" ) {
|
||||
Write-Host "Removing $_`:" -NoNewline
|
||||
Remove-Item -Path "$PROJECT_DIR\$_" -Recurse -Force
|
||||
if ( ! (Test-Path -Path "$PROJECT_DIR\$_") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Installing dependencies
|
||||
#-------------------------------------------------------------------------------
|
||||
Write-Host "Installing dependencies: " -NoNewline
|
||||
Start-Process -FilePath $SCRIPTS_DIR\pip3.exe `
|
||||
-ArgumentList "install", "-r", "$SALT_DEPS" `
|
||||
-WorkingDirectory "$PROJECT_DIR" `
|
||||
-Wait -WindowStyle Hidden
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\distro.exe" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
#-------------------------------------------------------------------------------
|
||||
# Installing dependencies
|
||||
#-------------------------------------------------------------------------------
|
||||
Write-Host "Installing dependencies: " -NoNewline
|
||||
Start-Process -FilePath $SCRIPTS_DIR\pip3.exe `
|
||||
-ArgumentList "install", "-r", "$SALT_DEPS" `
|
||||
-WorkingDirectory "$PROJECT_DIR" `
|
||||
-Wait -WindowStyle Hidden
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\distro.exe" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -131,91 +164,20 @@ if ( ! (Test-Path -Path "$SCRIPTS_DIR\wmitest*") ) {
|
|||
$dlls = "pythoncom38.dll",
|
||||
"pywintypes38.dll"
|
||||
$dlls | ForEach-Object {
|
||||
Write-Host "Copying $_ to Scripts: " -NoNewline
|
||||
Copy-Item "$SITE_PKGS_DIR\pywin32_system32\$_" "$SCRIPTS_DIR" -Force | Out-Null
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\$_") {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
if ( -not ( Test-Path -Path "$SCRIPTS_DIR\$_" ) ) {
|
||||
Write-Host "Copying $_ to Scripts: " -NoNewline
|
||||
Copy-Item "$SITE_PKGS_DIR\pywin32_system32\$_" "$SCRIPTS_DIR" -Force | Out-Null
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\$_") {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
Write-Host "Moving $_ to win32: " -NoNewline
|
||||
Move-Item "$SITE_PKGS_DIR\pywin32_system32\$_" "$SITE_PKGS_DIR\win32" -Force | Out-Null
|
||||
if ( Test-Path -Path "$SITE_PKGS_DIR\win32\$_" ){
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
# Remove pywin32_system32 directory since it is now empty
|
||||
Write-Host "Removing pywin32_system32 directory: " -NoNewline
|
||||
Remove-Item -Path "$SITE_PKGS_DIR\pywin32_system32" | Out-Null
|
||||
if ( ! (Test-Path -Path "$SITE_PKGS_DIR\pywin32_system32") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Remove PyWin32 PostInstall & testall scripts
|
||||
Write-Host "Removing pywin32 post-install scripts: " -NoNewline
|
||||
Remove-Item -Path "$SCRIPTS_DIR\pywin32_*" -Force | Out-Null
|
||||
if ( ! (Test-Path -Path "$SCRIPTS_DIR\pywin32_*") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Create gen_py directory
|
||||
Write-Host "Creating gen_py directory: " -NoNewline
|
||||
New-Item -Path "$SITE_PKGS_DIR\win32com\gen_py" -ItemType Directory -Force | Out-Null
|
||||
if ( Test-Path -Path "$SITE_PKGS_DIR\win32com\gen_py" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Installing Salt
|
||||
#-------------------------------------------------------------------------------
|
||||
Write-Host "Installing Salt: " -NoNewline
|
||||
# We're setting RELENV_PIP_DIR so the binaries will be placed in the root
|
||||
try {
|
||||
$env:RELENV_PIP_DIR = "yes"
|
||||
Start-Process -FilePath $SCRIPTS_DIR\pip3.exe `
|
||||
-ArgumentList "install", "." `
|
||||
-WorkingDirectory "$PROJECT_DIR" `
|
||||
-Wait -WindowStyle Hidden
|
||||
} finally {
|
||||
Remove-Item env:\RELENV_PIP_DIR
|
||||
}
|
||||
if ( Test-Path -Path "$BUILD_DIR\salt-minion.exe" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Remove fluff
|
||||
$remove = "doc",
|
||||
"readme",
|
||||
"salt-api",
|
||||
"salt-key",
|
||||
"salt-run",
|
||||
"salt-syndic",
|
||||
"salt-unity",
|
||||
"share",
|
||||
"spm",
|
||||
"wheel"
|
||||
$remove | ForEach-Object {
|
||||
if ( Test-Path -Path "$BUILD_DIR\$_*" ) {
|
||||
Write-Host "Removing $_`: " -NoNewline
|
||||
Remove-Item -Path "$BUILD_DIR\$_*" -Recurse
|
||||
if ( ! ( Test-Path -Path "$BUILD_DIR\$_*" ) ) {
|
||||
if ( -not ( Test-Path -Path "$SITE_PKGS_DIR\win32\$_" ) ) {
|
||||
Write-Host "Moving $_ to win32: " -NoNewline
|
||||
Copy-Item "$SITE_PKGS_DIR\pywin32_system32\$_" "$SITE_PKGS_DIR\win32" -Force | Out-Null
|
||||
if ( Test-Path -Path "$SITE_PKGS_DIR\win32\$_" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
|
@ -224,6 +186,98 @@ $remove | ForEach-Object {
|
|||
}
|
||||
}
|
||||
|
||||
if ( $PKG ) {
|
||||
# Remove pywin32_system32 directory since it is now empty
|
||||
if ( Test-Path -Path "$SITE_PKGS_DIR\pywin32_system32" ) {
|
||||
Write-Host "Removing pywin32_system32 directory: " -NoNewline
|
||||
Remove-Item -Path "$SITE_PKGS_DIR\pywin32_system32" -Recurse | Out-Null
|
||||
if ( ! (Test-Path -Path "$SITE_PKGS_DIR\pywin32_system32") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Remove PyWin32 PostInstall & testall scripts
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\pywin32_*" ) {
|
||||
Write-Host "Removing pywin32 post-install scripts: " -NoNewline
|
||||
Remove-Item -Path "$SCRIPTS_DIR\pywin32_*" -Force | Out-Null
|
||||
if ( ! (Test-Path -Path "$SCRIPTS_DIR\pywin32_*") ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
# Create gen_py directory
|
||||
if ( ! (Test-Path -Path "$SITE_PKGS_DIR\win32com\gen_py" ) ) {
|
||||
Write-Host "Creating gen_py directory: " -NoNewline
|
||||
New-Item -Path "$SITE_PKGS_DIR\win32com\gen_py" -ItemType Directory -Force | Out-Null
|
||||
if ( Test-Path -Path "$SITE_PKGS_DIR\win32com\gen_py" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
if ( ! $SkipInstall ) {
|
||||
#-------------------------------------------------------------------------------
|
||||
# Installing Salt
|
||||
#-------------------------------------------------------------------------------
|
||||
Write-Host "Installing Salt: " -NoNewline
|
||||
# We're setting RELENV_PIP_DIR so the binaries will be placed in the root
|
||||
if ( $SourceTarball ) {
|
||||
$InstallPath = $SourceTarball
|
||||
} else {
|
||||
$InstallPath = "."
|
||||
}
|
||||
$InstallPath
|
||||
try {
|
||||
$env:RELENV_PIP_DIR = "yes"
|
||||
Start-Process -FilePath $SCRIPTS_DIR\pip3.exe `
|
||||
-ArgumentList "install", $InstallPath `
|
||||
-WorkingDirectory "$PROJECT_DIR" `
|
||||
-Wait -WindowStyle Hidden
|
||||
} finally {
|
||||
Remove-Item env:\RELENV_PIP_DIR
|
||||
}
|
||||
if ( Test-Path -Path "$BUILD_DIR\salt-minion.exe" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
if ( $PKG ) {
|
||||
# Remove fluff
|
||||
$remove = "doc",
|
||||
"readme",
|
||||
"salt-api",
|
||||
"salt-key",
|
||||
"salt-run",
|
||||
"salt-syndic",
|
||||
"salt-unity",
|
||||
"share",
|
||||
"spm",
|
||||
"wheel"
|
||||
$remove | ForEach-Object {
|
||||
if ( Test-Path -Path "$BUILD_DIR\$_*" ) {
|
||||
Write-Host "Removing $_`: " -NoNewline
|
||||
Remove-Item -Path "$BUILD_DIR\$_*" -Recurse
|
||||
if ( ! ( Test-Path -Path "$BUILD_DIR\$_*" ) ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#-------------------------------------------------------------------------------
|
||||
# Finished
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -11,6 +11,12 @@ needed to build Python from source.
|
|||
install_vc_buildtools.ps1
|
||||
|
||||
#>
|
||||
param(
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
|
@ -25,8 +31,12 @@ $ErrorActionPreference = "Stop"
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -9,7 +9,17 @@ This script installs the Wix Toolset and its dependency .Net Framework 3.5
|
|||
install_wix.ps1
|
||||
|
||||
#>
|
||||
param(
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
@ -24,8 +34,12 @@ function ProductcodeExists($productCode) {
|
|||
}
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -18,7 +18,13 @@ param(
|
|||
# The version of Salt to be built. If this is not passed, the script will
|
||||
# attempt to get it from the git describe command on the Salt source
|
||||
# repo
|
||||
[String] $Version
|
||||
[String] $Version,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -32,8 +38,12 @@ param(
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
function VerifyOrDownload ($local_file, $URL, $SHA256) {
|
||||
|
@ -124,6 +134,26 @@ if ( ! (Test-Path -Path $WEBCACHE_DIR) ) {
|
|||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Ensure the WIX environment variable is set; if not, refresh and check again
|
||||
#-------------------------------------------------------------------------------
|
||||
# If wix is installed in the same session, the WIX environment variable won't be
|
||||
# defined. If it still fails, WIX may not be installed, or the WIX environment
|
||||
# variable may not be defined.
|
||||
if ( ! "$env:WIX" ) {
|
||||
Write-Host "Updating environment variables (wix): " -NoNewline
|
||||
foreach ($level in "Machine", "User") {
|
||||
$vars = [Environment]::GetEnvironmentVariables($level).GetEnumerator()
|
||||
$vars | ForEach-Object { $_ } | Set-Content -Path { "Env:$( $_.Name )" }
|
||||
}
|
||||
if ( "$env:WIX" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Caching VC++ Runtimes
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -17,7 +17,13 @@ param(
|
|||
# The version of Salt to be built. If this is not passed, the script will
|
||||
# attempt to get it from the git describe command on the Salt source
|
||||
# repo
|
||||
[String] $Version
|
||||
[String] $Version,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD
|
||||
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -33,8 +39,12 @@ $ErrorActionPreference = "Stop"
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
@ echo off
|
||||
Set "CurDir=%~dp0"
|
||||
PowerShell -ExecutionPolicy RemoteSigned -File "%CurDir%\stage_salt.ps1" %*
|
||||
PowerShell -ExecutionPolicy RemoteSigned -File "%CurDir%\prep_salt.ps1" %*
|
||||
|
|
|
@ -14,6 +14,21 @@ builds
|
|||
prep_salt.ps1
|
||||
|
||||
#>
|
||||
param(
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("b")]
|
||||
# The build directory to use instead of the default 'buildenv' directory
|
||||
[String] $BuildDir,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[Alias("c")]
|
||||
# Don't prettify the output of Write-Result
|
||||
[Switch] $CICD,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
# When true, additional routines are done to prepare for packaging.
|
||||
[Switch] $PKG
|
||||
)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Script Preferences
|
||||
|
@ -28,8 +43,12 @@ $ErrorActionPreference = "Stop"
|
|||
#-------------------------------------------------------------------------------
|
||||
|
||||
function Write-Result($result, $ForegroundColor="Green") {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
if ( $CICD ) {
|
||||
Write-Host $result -ForegroundColor $ForegroundColor
|
||||
} else {
|
||||
$position = 80 - $result.Length - [System.Console]::CursorLeft
|
||||
Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "")
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
@ -38,7 +57,11 @@ function Write-Result($result, $ForegroundColor="Green") {
|
|||
|
||||
$PROJECT_DIR = $(git rev-parse --show-toplevel)
|
||||
$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName
|
||||
$BUILD_DIR = "$SCRIPT_DIR\buildenv"
|
||||
if ( $BuildDir ) {
|
||||
$BUILD_DIR = $BuildDir
|
||||
} else {
|
||||
$BUILD_DIR = "$SCRIPT_DIR\buildenv"
|
||||
}
|
||||
$PREREQ_DIR = "$SCRIPT_DIR\prereqs"
|
||||
$SCRIPTS_DIR = "$BUILD_DIR\Scripts"
|
||||
$BUILD_CONF_DIR = "$BUILD_DIR\configs"
|
||||
|
@ -118,23 +141,27 @@ if ( Test-Path -Path $PREREQ_DIR ) {
|
|||
# Staging the Build Environment
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
Write-Host "Copying config files from Salt: " -NoNewline
|
||||
New-Item -Path $BUILD_CONF_DIR -ItemType Directory | Out-Null
|
||||
Copy-Item -Path "$PROJECT_DIR\conf\minion" -Destination "$BUILD_CONF_DIR"
|
||||
if ( Test-Path -Path "$BUILD_CONF_DIR\minion" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
if ( $PKG ) {
|
||||
Write-Host "Copying config files from Salt: " -NoNewline
|
||||
New-Item -Path $BUILD_CONF_DIR -ItemType Directory | Out-Null
|
||||
Copy-Item -Path "$PROJECT_DIR\conf\minion" -Destination "$BUILD_CONF_DIR"
|
||||
if ( Test-Path -Path "$BUILD_CONF_DIR\minion" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "Copying SSM to Bin: " -NoNewline
|
||||
Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILD_DIR\ssm.exe"
|
||||
if ( Test-Path -Path "$BUILD_DIR\ssm.exe" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
if ( $PKG ) {
|
||||
Write-Host "Copying SSM to Root: " -NoNewline
|
||||
Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILD_DIR\ssm.exe"
|
||||
if ( Test-Path -Path "$BUILD_DIR\ssm.exe" ) {
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null
|
||||
|
@ -162,25 +189,27 @@ if ( Test-Path -Path "$PREREQ_DIR\$file" ) {
|
|||
# Remove binaries not needed by Salt
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
$binaries = @(
|
||||
"py.exe",
|
||||
"pyw.exe",
|
||||
"pythonw.exe",
|
||||
"venvlauncher.exe",
|
||||
"venvwlauncher.exe"
|
||||
)
|
||||
Write-Host "Removing Python binaries: " -NoNewline
|
||||
$binaries | ForEach-Object {
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\$_" ) {
|
||||
# Use .NET; the PowerShell function is asynchronous
|
||||
[System.IO.File]::Delete("$SCRIPTS_DIR\$_")
|
||||
if ( $PKG ) {
|
||||
$binaries = @(
|
||||
"py.exe",
|
||||
"pyw.exe",
|
||||
"pythonw.exe",
|
||||
"venvlauncher.exe",
|
||||
"venvwlauncher.exe"
|
||||
)
|
||||
Write-Host "Removing Python binaries: " -NoNewline
|
||||
$binaries | ForEach-Object {
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\$_" ) {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
# Use .NET; the PowerShell function is asynchronous
|
||||
[System.IO.File]::Delete("$SCRIPTS_DIR\$_")
|
||||
if ( Test-Path -Path "$SCRIPTS_DIR\$_" ) {
|
||||
Write-Result "Failed" -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
}
|
||||
Write-Result "Success" -ForegroundColor Green
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Remove pywin32 components not needed by Salt
|
||||
|
|
2
requirements/build.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
twine
|
||||
build>=0.7.0
|
|
@ -1 +1,3 @@
|
|||
towncrier
|
||||
towncrier==22.12.0
|
||||
looseversion
|
||||
packaging
|
||||
|
|
|
@ -4,15 +4,26 @@
|
|||
#
|
||||
# pip-compile --output-file=requirements/static/ci/py3.10/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/changelog.in
|
||||
#
|
||||
click==7.1.2
|
||||
click-default-group==1.2.2
|
||||
# via towncrier
|
||||
click==7.1.2
|
||||
# via
|
||||
# click-default-group
|
||||
# towncrier
|
||||
incremental==17.5.0
|
||||
# via towncrier
|
||||
jinja2==3.1.2
|
||||
# via towncrier
|
||||
looseversion==1.0.3
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
markupsafe==2.0.1
|
||||
# via jinja2
|
||||
toml==0.10.1
|
||||
# via towncrier
|
||||
towncrier==19.2.0
|
||||
packaging==23.0
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
tomli==2.0.1
|
||||
# via towncrier
|
||||
towncrier==22.12.0
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
# setuptools
|
||||
|
|
|
@ -24,7 +24,7 @@ pygments==2.13.0
|
|||
# via rich
|
||||
python-dateutil==2.8.2
|
||||
# via botocore
|
||||
python-tools-scripts==0.9.3
|
||||
python-tools-scripts==0.9.6
|
||||
# via -r requirements/static/ci/tools.in
|
||||
pyyaml==6.0
|
||||
# via -r requirements/static/ci/tools.in
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile --output-file=requirements/static/ci/py3.6/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.6/linux.txt' requirements/static/ci/changelog.in
|
||||
#
|
||||
click==7.1.1
|
||||
# via towncrier
|
||||
incremental==17.5.0
|
||||
# via towncrier
|
||||
jinja2==3.0.3
|
||||
# via towncrier
|
||||
markupsafe==2.0.1
|
||||
# via jinja2
|
||||
toml==0.10.2
|
||||
# via towncrier
|
||||
towncrier==19.2.0
|
||||
# via -r requirements/static/ci/changelog.in
|
|
@ -1,18 +0,0 @@
|
|||
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile --output-file=requirements/static/ci/py3.7/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.7/linux.txt' requirements/static/ci/changelog.in
|
||||
#
|
||||
click==7.1.1
|
||||
# via towncrier
|
||||
incremental==17.5.0
|
||||
# via towncrier
|
||||
jinja2==3.1.2
|
||||
# via towncrier
|
||||
markupsafe==2.0.1
|
||||
# via jinja2
|
||||
toml==0.10.2
|
||||
# via towncrier
|
||||
towncrier==19.2.0
|
||||
# via -r requirements/static/ci/changelog.in
|
|
@ -4,15 +4,26 @@
|
|||
#
|
||||
# pip-compile --output-file=requirements/static/ci/py3.8/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.8/linux.txt' requirements/static/ci/changelog.in
|
||||
#
|
||||
click==7.1.2
|
||||
click-default-group==1.2.2
|
||||
# via towncrier
|
||||
click==7.1.2
|
||||
# via
|
||||
# click-default-group
|
||||
# towncrier
|
||||
incremental==17.5.0
|
||||
# via towncrier
|
||||
jinja2==3.1.2
|
||||
# via towncrier
|
||||
looseversion==1.0.3
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
markupsafe==2.0.1
|
||||
# via jinja2
|
||||
toml==0.10.1
|
||||
# via towncrier
|
||||
towncrier==19.2.0
|
||||
packaging==23.0
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
tomli==2.0.1
|
||||
# via towncrier
|
||||
towncrier==22.12.0
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
# setuptools
|
||||
|
|
|
@ -4,15 +4,26 @@
|
|||
#
|
||||
# pip-compile --output-file=requirements/static/ci/py3.9/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/changelog.in
|
||||
#
|
||||
click==7.1.2
|
||||
click-default-group==1.2.2
|
||||
# via towncrier
|
||||
click==7.1.2
|
||||
# via
|
||||
# click-default-group
|
||||
# towncrier
|
||||
incremental==17.5.0
|
||||
# via towncrier
|
||||
jinja2==3.1.2
|
||||
# via towncrier
|
||||
looseversion==1.0.3
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
markupsafe==2.0.1
|
||||
# via jinja2
|
||||
toml==0.10.1
|
||||
# via towncrier
|
||||
towncrier==19.2.0
|
||||
packaging==23.0
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
tomli==2.0.1
|
||||
# via towncrier
|
||||
towncrier==22.12.0
|
||||
# via -r requirements/static/ci/changelog.in
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
# setuptools
|
||||
|
|
|
@ -24,7 +24,7 @@ pygments==2.13.0
|
|||
# via rich
|
||||
python-dateutil==2.8.2
|
||||
# via botocore
|
||||
python-tools-scripts==0.9.3
|
||||
python-tools-scripts==0.9.6
|
||||
# via -r requirements/static/ci/tools.in
|
||||
pyyaml==6.0
|
||||
# via -r requirements/static/ci/tools.in
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
python-tools-scripts >= 0.9.3
|
||||
python-tools-scripts >= 0.9.6
|
||||
attrs
|
||||
boto3
|
||||
pyyaml
|
||||
|
|
|
@ -32,10 +32,17 @@ authenticated against. This defaults to `login`
|
|||
This should not be needed with python >= 3.3, because the `os` module has the
|
||||
`getgrouplist` function.
|
||||
|
||||
|
||||
.. note:: This module executes itself in a subprocess in order to use the system python
|
||||
and pam libraries. We do this to avoid openssl version conflicts when
|
||||
running under a salt onedir build.
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
import sys
|
||||
from ctypes import (
|
||||
CDLL,
|
||||
CFUNCTYPE,
|
||||
|
@ -52,7 +59,11 @@ from ctypes import (
|
|||
)
|
||||
from ctypes.util import find_library
|
||||
|
||||
import salt.utils.user
|
||||
HAS_USER = True
|
||||
try:
|
||||
import salt.utils.user
|
||||
except ImportError:
|
||||
HAS_USER = False
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -163,7 +174,7 @@ def __virtual__():
|
|||
return HAS_LIBC and HAS_PAM
|
||||
|
||||
|
||||
def authenticate(username, password):
|
||||
def _authenticate(username, password, service, encoding="utf-8"):
|
||||
"""
|
||||
Returns True if the given username and password authenticate for the
|
||||
given service. Returns False otherwise
|
||||
|
@ -172,14 +183,12 @@ def authenticate(username, password):
|
|||
|
||||
``password``: the password in plain text
|
||||
"""
|
||||
service = __opts__.get("auth.pam.service", "login")
|
||||
|
||||
if isinstance(username, str):
|
||||
username = username.encode(__salt_system_encoding__)
|
||||
username = username.encode(encoding)
|
||||
if isinstance(password, str):
|
||||
password = password.encode(__salt_system_encoding__)
|
||||
password = password.encode(encoding)
|
||||
if isinstance(service, str):
|
||||
service = service.encode(__salt_system_encoding__)
|
||||
service = service.encode(encoding)
|
||||
|
||||
@CONV_FUNC
|
||||
def my_conv(n_messages, messages, p_response, app_data):
|
||||
|
@ -214,6 +223,38 @@ def authenticate(username, password):
|
|||
return retval == 0
|
||||
|
||||
|
||||
def authenticate(username, password):
|
||||
"""
|
||||
Returns True if the given username and password authenticate for the
|
||||
given service. Returns False otherwise
|
||||
|
||||
``username``: the username to authenticate
|
||||
|
||||
``password``: the password in plain text
|
||||
"""
|
||||
env = os.environ.copy()
|
||||
env["SALT_PAM_USERNAME"] = username
|
||||
env["SALT_PAM_PASSWORD"] = password
|
||||
env["SALT_PAM_SERVICE"] = __opts__.get("auth.pam.service", "login")
|
||||
env["SALT_PAM_ENCODING"] = __salt_system_encoding__
|
||||
pyexe = pathlib.Path(__opts__.get("auth.pam.python", "/usr/bin/python3")).resolve()
|
||||
pyfile = pathlib.Path(__file__).resolve()
|
||||
if not pyexe.exists():
|
||||
log.error("Error 'auth.pam.python' config value does not exist: %s", pyexe)
|
||||
return False
|
||||
ret = subprocess.run(
|
||||
[str(pyexe), str(pyfile)],
|
||||
env=env,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
check=False,
|
||||
)
|
||||
if ret.returncode == 0:
|
||||
return True
|
||||
log.error("Pam auth failed for %s: %s %s", username, ret.stdout, ret.stderr)
|
||||
return False
|
||||
|
||||
|
||||
def auth(username, password, **kwargs):
|
||||
"""
|
||||
Authenticate via pam
|
||||
|
@ -228,3 +269,14 @@ def groups(username, *args, **kwargs):
|
|||
Uses system groups
|
||||
"""
|
||||
return salt.utils.user.get_group_list(username)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if _authenticate(
|
||||
os.environ["SALT_PAM_USERNAME"],
|
||||
os.environ["SALT_PAM_PASSWORD"],
|
||||
os.environ["SALT_PAM_SERVICE"],
|
||||
os.environ["SALT_PAM_ENCODING"],
|
||||
):
|
||||
sys.exit(0)
|
||||
sys.exit(1)
|
||||
|
|
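The hunk above replaces the in-process PAM call with a re-exec of this module under the system Python: the credentials travel through SALT_PAM_* environment variables and the child signals the result through its exit code. A minimal sketch of that parent-side contract is below; it is an illustration only, and the module path, interpreter path, and default service name are assumptions rather than values taken from the Salt configuration.

import os
import pathlib
import subprocess


def pam_check(username, password, service="login", pyexe="/usr/bin/python3"):
    # Mirror the env-variable protocol used by the authenticate() shown above
    env = os.environ.copy()
    env["SALT_PAM_USERNAME"] = username
    env["SALT_PAM_PASSWORD"] = password
    env["SALT_PAM_SERVICE"] = service  # assumed default; Salt reads auth.pam.service
    env["SALT_PAM_ENCODING"] = "utf-8"
    pam_module = pathlib.Path("salt/auth/pam.py").resolve()  # assumed checkout-relative path
    ret = subprocess.run(
        [pyexe, str(pam_module)],
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    # Exit code 0 means the child's _authenticate() call succeeded
    return ret.returncode == 0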
|
@ -54,7 +54,7 @@ def _read_proc_file(path, opts):
|
|||
"""
|
||||
Return a dict of JID metadata, or None
|
||||
"""
|
||||
current_thread = threading.currentThread().name
|
||||
current_thread = threading.current_thread().name
|
||||
pid = os.getpid()
|
||||
with salt.utils.files.fopen(path, "rb") as fp_:
|
||||
buf = fp_.read()
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
_repo_root = pathlib.Path(__file__).parent.parent
|
||||
|
||||
if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
|
||||
# In this particular case, we want to make sure that the repo root
|
||||
# is not part of sys.path so that when we import salt, we import salt from
|
||||
# the onedir and not the code checkout
|
||||
if "" in sys.path:
|
||||
sys.path.remove("")
|
||||
if str(_repo_root) in sys.path:
|
||||
sys.path.remove(str(_repo_root))
|
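The new tests/sitecustomize.py above removes the repository root from sys.path during onedir test runs so that importing salt picks up the onedir package rather than the checkout. A small, hedged sketch of how that outcome can be double-checked from a script placed at the repository root (the location is an assumption):

import pathlib

import salt

# Assumes this snippet lives at the repository root, next to the salt/ package
repo_salt = pathlib.Path(__file__).resolve().parent / "salt"
imported_salt = pathlib.Path(salt.__file__).resolve().parent

if imported_salt == repo_salt:
    print("salt was imported from the code checkout")
else:
    print("salt was imported from elsewhere (e.g. the onedir):", imported_salt)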
|
@ -18,6 +18,7 @@ import _pytest.skipping
|
|||
import psutil
|
||||
import pytest
|
||||
|
||||
import salt
|
||||
import salt._logging
|
||||
import salt._logging.mixins
|
||||
import salt.config
|
||||
|
@ -47,7 +48,8 @@ os.chdir(str(CODE_DIR))
|
|||
# Make sure the current directory is the first item in sys.path
|
||||
if str(CODE_DIR) in sys.path:
|
||||
sys.path.remove(str(CODE_DIR))
|
||||
sys.path.insert(0, str(CODE_DIR))
|
||||
if os.environ.get("ONEDIR_TESTRUN", "0") == "0":
|
||||
sys.path.insert(0, str(CODE_DIR))
|
||||
|
||||
os.environ["REPO_ROOT_DIR"] = str(CODE_DIR)
|
||||
|
||||
|
@ -222,6 +224,19 @@ def pytest_configure(config):
|
|||
called after command line options have been parsed
|
||||
and all plugins and initial conftest files been loaded.
|
||||
"""
|
||||
# try:
|
||||
# assert config._onedir_check_complete
|
||||
# return
|
||||
# except AttributeError:
|
||||
# if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
|
||||
# if pathlib.Path(salt.__file__).parent == CODE_DIR / "salt":
|
||||
# raise pytest.UsageError(
|
||||
# "Apparently running the test suite against the onedir build "
|
||||
# "of salt, however, the imported salt package is pointing to "
|
||||
# "the respository checkout instead of the onedir package."
|
||||
# )
|
||||
# config._onedir_check_complete = True
|
||||
|
||||
for dirname in CODE_DIR.iterdir():
|
||||
if not dirname.is_dir():
|
||||
continue
|
||||
|
@ -277,6 +292,14 @@ def pytest_configure(config):
|
|||
"when called returns `True`. If `skip` is a callable, it should accept a single argument "
|
||||
"'grains', which is the grains dictionary.",
|
||||
)
|
||||
config.addinivalue_line(
|
||||
"markers",
|
||||
"skip_initial_onedir_failure(skip=<boolean or callable, reason=None): Skip known test failures "
|
||||
"under the new onedir builds if the environment variable SKIP_INITIAL_ONEDIR_FAILURES "
|
||||
"is equal to '1' and the 'skip' keyword argument is either `True` or it's a callable that "
|
||||
"when called returns `True`. If `skip` is a callable, it should accept a single argument "
|
||||
"'grains', which is the grains dictionary.",
|
||||
)
|
||||
# "Flag" the slowTest decorator if we're skipping slow tests or not
|
||||
os.environ["SLOW_TESTS"] = str(config.getoption("--run-slow"))
|
||||
|
||||
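The skip_initial_onedir_failure marker registered above accepts the same skip and reason keyword arguments that pytest_runtest_setup handles further down, with skip either a boolean or a callable that receives the grains dictionary. A hedged usage sketch follows; the test body and the grains fixture name are illustrative assumptions, not code from this change:

import pytest


@pytest.mark.skip_initial_onedir_failure(
    skip=lambda grains: grains.get("os_family") == "Windows",
    reason="known initial onedir failure on Windows",
)
def test_example_onedir_behavior(grains):  # the grains fixture name is an assumption
    # Placeholder body; the point is how the marker's skip/reason kwargs are passed
    assert grains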
|
@ -611,6 +634,43 @@ def pytest_runtest_setup(item):
|
|||
if skip:
|
||||
raise pytest.skip.Exception(reason, _use_item_location=True)
|
||||
|
||||
skip_initial_onedir_failures_env_set = (
|
||||
os.environ.get("SKIP_INITIAL_ONEDIR_FAILURES", "0") == "1"
|
||||
)
|
||||
skip_initial_onedir_failure_marker = item.get_closest_marker(
|
||||
"skip_initial_onedir_failure"
|
||||
)
|
||||
if (
|
||||
skip_initial_onedir_failure_marker is not None
|
||||
and skip_initial_onedir_failures_env_set
|
||||
):
|
||||
if skip_initial_onedir_failure_marker.args:
|
||||
raise pytest.UsageError(
|
||||
"'skip_initial_onedir_failure' marker does not accept any arguments "
|
||||
"only keyword arguments."
|
||||
)
|
||||
kwargs = skip_initial_onedir_failure_marker.kwargs.copy()
|
||||
skip = kwargs.pop("skip", True)
|
||||
if skip and not callable(skip) and not isinstance(skip, bool):
|
||||
raise pytest.UsageError(
|
||||
"The 'skip' keyword argument to the 'skip_initial_onedir_failure' marker "
|
||||
"requires a boolean or callable, not '{}'.".format(type(skip))
|
||||
)
|
||||
reason = kwargs.pop("reason", None)
|
||||
if reason is None:
|
||||
reason = "Test skipped because it's a know GH Actions initial failure that needs to be fixed"
|
||||
if kwargs:
|
||||
raise pytest.UsageError(
|
||||
"'skip_initial_onedir_failure' marker does not accept any keyword arguments "
|
||||
"except 'skip' and 'reason'."
|
||||
)
|
||||
if skip and callable(skip):
|
||||
grains = _grains_for_marker()
|
||||
skip = skip(grains)
|
||||
|
||||
if skip:
|
||||
raise pytest.skip.Exception(reason, _use_item_location=True)
|
||||
|
||||
requires_random_entropy_marker = item.get_closest_marker("requires_random_entropy")
|
||||
if requires_random_entropy_marker is not None:
|
||||
if requires_random_entropy_marker.args:
|
||||
|
@ -740,15 +800,22 @@ def salt_factories_config():
|
|||
start_timeout = 120
|
||||
else:
|
||||
start_timeout = 60
|
||||
|
||||
if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
|
||||
code_dir = None
|
||||
else:
|
||||
code_dir = str(CODE_DIR)
|
||||
|
||||
kwargs = {
|
||||
"code_dir": str(CODE_DIR),
|
||||
"code_dir": code_dir,
|
||||
"start_timeout": start_timeout,
|
||||
"inject_sitecustomize": MAYBE_RUN_COVERAGE,
|
||||
}
|
||||
if MAYBE_RUN_COVERAGE:
|
||||
kwargs["coverage_rc_path"] = str(COVERAGERC_FILE)
|
||||
coverage_db_path = os.environ.get("COVERAGE_FILE")
|
||||
if coverage_db_path:
|
||||
kwargs["coverage_db_path"] = coverage_db_path
|
||||
else:
|
||||
kwargs["coverage_rc_path"] = None
|
||||
kwargs["coverage_db_path"] = os.environ.get("COVERAGE_FILE")
|
||||
return kwargs
|
||||
|
||||
|
||||
|
|
|
@ -1,14 +1,7 @@
|
|||
"""
|
||||
tests.integration.modules.pip
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
|
@ -18,7 +11,7 @@ import salt.utils.path
|
|||
import salt.utils.platform
|
||||
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.helpers import patched_environ
|
||||
from tests.support.helpers import VirtualEnv, patched_environ
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
|
||||
|
||||
|
@ -31,9 +24,6 @@ class PipModuleTest(ModuleCase):
|
|||
# Remove the venv test directory
|
||||
self.addCleanup(shutil.rmtree, self.venv_test_dir, ignore_errors=True)
|
||||
self.venv_dir = os.path.join(self.venv_test_dir, "venv")
|
||||
self.pip_temp = os.path.join(self.venv_test_dir, ".pip-temp")
|
||||
if not os.path.isdir(self.pip_temp):
|
||||
os.makedirs(self.pip_temp)
|
||||
self.patched_environ = patched_environ(
|
||||
PIP_SOURCE_DIR="",
|
||||
PIP_BUILD_DIR="",
|
||||
|
@ -42,43 +32,6 @@ class PipModuleTest(ModuleCase):
|
|||
self.patched_environ.__enter__()
|
||||
self.addCleanup(self.patched_environ.__exit__)
|
||||
|
||||
def _create_virtualenv(self, path):
|
||||
"""
|
||||
The reason why the virtualenv creation is proxied by this function is mostly
|
||||
because under windows, we can't seem to properly create a virtualenv off of
|
||||
another virtualenv (we can on linux) and also because we really don't want to
|
||||
test virtualenv creation off of another virtualenv, we want a virtualenv created
|
||||
from the original python.
|
||||
Also, on windows, we must also point to the virtualenv binary outside the existing
|
||||
virtualenv because it will fail otherwise
|
||||
"""
|
||||
try:
|
||||
if salt.utils.platform.is_windows():
|
||||
python = os.path.join(sys.real_prefix, os.path.basename(sys.executable))
|
||||
else:
|
||||
python_binary_names = [
|
||||
"python{}.{}".format(*sys.version_info),
|
||||
"python{}".format(*sys.version_info),
|
||||
"python",
|
||||
]
|
||||
for binary_name in python_binary_names:
|
||||
python = os.path.join(sys.real_prefix, "bin", binary_name)
|
||||
if os.path.exists(python):
|
||||
break
|
||||
else:
|
||||
self.fail(
|
||||
"Couldn't find a python binary name under '{}' matching: {}".format(
|
||||
os.path.join(sys.real_prefix, "bin"), python_binary_names
|
||||
)
|
||||
)
|
||||
# We're running off a virtualenv, and we don't want to create a virtualenv off of
|
||||
# a virtualenv
|
||||
kwargs = {"python": python}
|
||||
except AttributeError:
|
||||
# We're running off of the system python
|
||||
kwargs = {}
|
||||
self.run_function("virtualenv.create", [path], **kwargs)
|
||||
|
||||
def _check_download_error(self, ret):
|
||||
"""
|
||||
Checks to see if a download error looks transitory
|
||||
|
@ -115,477 +68,487 @@ class PipModuleTest(ModuleCase):
|
|||
@pytest.mark.slow_test
|
||||
def test_issue_2087_missing_pip(self):
|
||||
# Let's create the testing virtualenv
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Let's remove the pip binary
|
||||
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
|
||||
site_dir = self.run_function(
|
||||
"virtualenv.get_distribution_path", [self.venv_dir, "pip"]
|
||||
)
|
||||
if salt.utils.platform.is_windows():
|
||||
pip_bin = os.path.join(self.venv_dir, "Scripts", "pip.exe")
|
||||
site_dir = os.path.join(self.venv_dir, "lib", "site-packages")
|
||||
if not os.path.isfile(pip_bin):
|
||||
self.skipTest("Failed to find the pip binary to the test virtualenv")
|
||||
os.remove(pip_bin)
|
||||
|
||||
# Also remove the pip dir from site-packages
|
||||
# This is needed now that we're using python -m pip instead of the
|
||||
# pip binary directly. python -m pip will still work even if the
|
||||
# pip binary is missing
|
||||
shutil.rmtree(os.path.join(site_dir, "pip"))
|
||||
|
||||
# Let's run a pip depending functions
|
||||
for func in ("pip.freeze", "pip.list"):
|
||||
ret = self.run_function(func, bin_env=self.venv_dir)
|
||||
self.assertIn(
|
||||
"Command required for '{}' not found: "
|
||||
"Could not find a `pip` binary".format(func),
|
||||
ret,
|
||||
# Let's remove the pip binary
|
||||
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
|
||||
site_dir = self.run_function(
|
||||
"virtualenv.get_distribution_path", [self.venv_dir, "pip"]
|
||||
)
|
||||
if salt.utils.platform.is_windows():
|
||||
pip_bin = os.path.join(self.venv_dir, "Scripts", "pip.exe")
|
||||
site_dir = os.path.join(self.venv_dir, "lib", "site-packages")
|
||||
if not os.path.isfile(pip_bin):
|
||||
self.skipTest("Failed to find the pip binary to the test virtualenv")
|
||||
os.remove(pip_bin)
|
||||
|
||||
# Also remove the pip dir from site-packages
|
||||
# This is needed now that we're using python -m pip instead of the
|
||||
# pip binary directly. python -m pip will still work even if the
|
||||
# pip binary is missing
|
||||
shutil.rmtree(os.path.join(site_dir, "pip"))
|
||||
|
||||
# Let's run a pip depending functions
|
||||
for func in ("pip.freeze", "pip.list"):
|
||||
ret = self.run_function(func, bin_env=self.venv_dir)
|
||||
assert (
|
||||
"Command required for '{}' not found: Could not find a `pip` binary".format(
|
||||
func
|
||||
)
|
||||
in ret
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_requirements_as_list_of_chains__cwd_set__absolute_file_path(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Create a requirements file that depends on another one.
|
||||
# Create a requirements file that depends on another one.
|
||||
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
|
||||
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
|
||||
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
|
||||
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements1b.txt\n")
|
||||
with salt.utils.files.fopen(req1b_filename, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("-r requirements2b.txt\n")
|
||||
with salt.utils.files.fopen(req2b_filename, "w") as f:
|
||||
f.write("pep8\n")
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements1b.txt\n")
|
||||
with salt.utils.files.fopen(req1b_filename, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("-r requirements2b.txt\n")
|
||||
with salt.utils.files.fopen(req2b_filename, "w") as f:
|
||||
f.write("pep8\n")
|
||||
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=requirements_list,
|
||||
bin_env=self.venv_dir,
|
||||
cwd=self.venv_dir,
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=requirements_list,
|
||||
bin_env=self.venv_dir,
|
||||
cwd=self.venv_dir,
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
self.assertTrue(found)
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
assert found
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Create a requirements file that depends on another one.
|
||||
# Create a requirements file that depends on another one.
|
||||
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
|
||||
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements1.txt")
|
||||
req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt")
|
||||
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements1b.txt\n")
|
||||
with salt.utils.files.fopen(req1b_filename, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("-r requirements2b.txt\n")
|
||||
with salt.utils.files.fopen(req2b_filename, "w") as f:
|
||||
f.write("pep8\n")
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements1b.txt\n")
|
||||
with salt.utils.files.fopen(req1b_filename, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("-r requirements2b.txt\n")
|
||||
with salt.utils.files.fopen(req2b_filename, "w") as f:
|
||||
f.write("pep8\n")
|
||||
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
self.assertTrue(found)
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
assert found
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_requirements_as_list__absolute_file_path(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("pep8\n")
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("pep8\n")
|
||||
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install", requirements=requirements_list, bin_env=self.venv_dir
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
self.assertTrue(found)
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
assert found
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_requirements_as_list__non_absolute_file_path(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Create a requirements file that depends on another one.
|
||||
# Create a requirements file that depends on another one.
|
||||
|
||||
req1_filename = "requirements.txt"
|
||||
req2_filename = "requirements2.txt"
|
||||
req_cwd = self.venv_dir
|
||||
req1_filename = "requirements.txt"
|
||||
req2_filename = "requirements2.txt"
|
||||
req_cwd = self.venv_dir
|
||||
|
||||
req1_filepath = os.path.join(req_cwd, req1_filename)
|
||||
req2_filepath = os.path.join(req_cwd, req2_filename)
|
||||
req1_filepath = os.path.join(req_cwd, req1_filename)
|
||||
req2_filepath = os.path.join(req_cwd, req2_filename)
|
||||
|
||||
with salt.utils.files.fopen(req1_filepath, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filepath, "w") as f:
|
||||
f.write("pep8\n")
|
||||
with salt.utils.files.fopen(req1_filepath, "w") as f:
|
||||
f.write("irc3-plugins-test\n")
|
||||
with salt.utils.files.fopen(req2_filepath, "w") as f:
|
||||
f.write("pep8\n")
|
||||
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
requirements_list = [req1_filename, req2_filename]
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=requirements_list,
|
||||
bin_env=self.venv_dir,
|
||||
cwd=req_cwd,
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=requirements_list,
|
||||
bin_env=self.venv_dir,
|
||||
cwd=req_cwd,
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
self.assertTrue(found)
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
found = self.pip_successful_install(ret["stdout"])
|
||||
assert found
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_chained_requirements__absolute_file_path(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Create a requirements file that depends on another one.
|
||||
# Create a requirements file that depends on another one.
|
||||
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements2.txt")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("pep8")
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements2.txt")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("pep8")
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install", requirements=req1_filename, bin_env=self.venv_dir
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install", requirements=req1_filename, bin_env=self.venv_dir
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("installed pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
assert "installed pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_chained_requirements__non_absolute_file_path(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Create a requirements file that depends on another one.
|
||||
req_basepath = self.venv_dir
|
||||
# Create a requirements file that depends on another one.
|
||||
req_basepath = self.venv_dir
|
||||
|
||||
req1_filename = "requirements.txt"
|
||||
req2_filename = "requirements2.txt"
|
||||
req1_filename = "requirements.txt"
|
||||
req2_filename = "requirements2.txt"
|
||||
|
||||
req1_file = os.path.join(self.venv_dir, req1_filename)
|
||||
req2_file = os.path.join(self.venv_dir, req2_filename)
|
||||
req1_file = os.path.join(self.venv_dir, req1_filename)
|
||||
req2_file = os.path.join(self.venv_dir, req2_filename)
|
||||
|
||||
with salt.utils.files.fopen(req1_file, "w") as f:
|
||||
f.write("-r requirements2.txt")
|
||||
with salt.utils.files.fopen(req2_file, "w") as f:
|
||||
f.write("pep8")
|
||||
with salt.utils.files.fopen(req1_file, "w") as f:
|
||||
f.write("-r requirements2.txt")
|
||||
with salt.utils.files.fopen(req2_file, "w") as f:
|
||||
f.write("pep8")
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=req1_filename,
|
||||
cwd=req_basepath,
|
||||
bin_env=self.venv_dir,
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=req1_filename,
|
||||
cwd=req_basepath,
|
||||
bin_env=self.venv_dir,
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("installed pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
assert "installed pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_issue_4805_nested_requirements(self):
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
|
||||
# Create a requirements file that depends on another one.
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements2.txt")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("pep8")
|
||||
# Create a requirements file that depends on another one.
|
||||
req1_filename = os.path.join(self.venv_dir, "requirements.txt")
|
||||
req2_filename = os.path.join(self.venv_dir, "requirements2.txt")
|
||||
with salt.utils.files.fopen(req1_filename, "w") as f:
|
||||
f.write("-r requirements2.txt")
|
||||
with salt.utils.files.fopen(req2_filename, "w") as f:
|
||||
f.write("pep8")
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=req1_filename,
|
||||
bin_env=self.venv_dir,
|
||||
timeout=300,
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
requirements=req1_filename,
|
||||
bin_env=self.venv_dir,
|
||||
timeout=300,
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("installed pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
assert ret["retcode"] == 0
|
||||
assert "installed pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_pip_uninstall(self):
|
||||
# Let's create the testing virtualenv
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
ret = self.run_function("pip.install", ["pep8"], bin_env=self.venv_dir)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
ret = self.run_function("pip.install", ["pep8"], bin_env=self.venv_dir)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("installed pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
assert ret["retcode"] == 0
|
||||
assert "installed pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.uninstall' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.uninstall' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("uninstalled pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
assert "uninstalled pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_pip_install_upgrade(self):
|
||||
# Create the testing virtualenv
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
ret = self.run_function("pip.install", ["pep8==1.3.4"], bin_env=self.venv_dir)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
ret = self.run_function(
|
||||
"pip.install", ["pep8==1.3.4"], bin_env=self.venv_dir
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("installed pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
assert ret["retcode"] == 0
|
||||
assert "installed pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install", ["pep8"], bin_env=self.venv_dir, upgrade=True
|
||||
)
|
||||
|
||||
ret = self.run_function(
|
||||
"pip.install", ["pep8"], bin_env=self.venv_dir, upgrade=True
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
assert ret["retcode"] == 0
|
||||
assert "installed pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("installed pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
|
||||
|
||||
ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.uninstall' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.uninstall' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
self.assertIn("uninstalled pep8", ret["stdout"])
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
try:
|
||||
assert ret["retcode"] == 0
|
||||
assert "uninstalled pep8" in ret["stdout"]
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_pip_install_multiple_editables(self):
|
||||
editables = [
|
||||
"git+https://github.com/jek/blinker.git#egg=Blinker",
|
||||
"git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
|
||||
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
|
||||
]
|
||||
|
||||
# Create the testing virtualenv
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
[],
|
||||
editable="{}".format(",".join(editables)),
|
||||
bin_env=self.venv_dir,
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
[],
|
||||
editable="{}".format(",".join(editables)),
|
||||
bin_env=self.venv_dir,
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
match = re.search(
|
||||
"Successfully installed Blinker(.*) SaltTesting(.*)", ret["stdout"]
|
||||
)
|
||||
assert match is not None
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
assert ret["retcode"] == 0
|
||||
for package in ("iStr", "SaltTesting"):
|
||||
match = re.search(
|
||||
r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(
|
||||
package
|
||||
),
|
||||
ret["stdout"],
|
||||
)
|
||||
assert match is not None
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_pip_install_multiple_editables_and_pkgs(self):
|
||||
editables = [
|
||||
"git+https://github.com/jek/blinker.git#egg=Blinker",
|
||||
"git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
|
||||
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
|
||||
]
|
||||
|
||||
# Create the testing virtualenv
|
||||
self._create_virtualenv(self.venv_dir)
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
["pep8"],
|
||||
editable="{}".format(",".join(editables)),
|
||||
bin_env=self.venv_dir,
|
||||
)
|
||||
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
with VirtualEnv(self.venv_dir):
|
||||
ret = self.run_function(
|
||||
"pip.install",
|
||||
["pep8"],
|
||||
editable="{}".format(",".join(editables)),
|
||||
bin_env=self.venv_dir,
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
self.assertEqual(ret["retcode"], 0)
|
||||
for package in ("Blinker", "SaltTesting", "pep8"):
|
||||
self.assertRegex(
|
||||
ret["stdout"],
|
||||
r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(package),
|
||||
if not isinstance(ret, dict):
|
||||
self.fail(
|
||||
"The 'pip.install' command did not return the excepted dictionary."
|
||||
" Output:\n{}".format(ret)
|
||||
)
|
||||
|
||||
try:
|
||||
if self._check_download_error(ret["stdout"]):
|
||||
self.skipTest("Test skipped due to pip download error")
|
||||
assert ret["retcode"] == 0
|
||||
for package in ("iStr", "SaltTesting", "pep8"):
|
||||
match = re.search(
|
||||
r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(
|
||||
package
|
||||
),
|
||||
ret["stdout"],
|
||||
)
|
||||
assert match is not None
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
except KeyError as exc:
|
||||
self.fail(
|
||||
"The returned dictionary is missing an expected key. Error: '{}'."
|
||||
" Dictionary: {}".format(exc, pprint.pformat(ret))
|
||||
)
|
||||
|
||||
@pytest.mark.skipif(
|
||||
shutil.which("/bin/pip3") is None, reason="Could not find /bin/pip3"
|
||||
|
|
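The requirements-chaining behaviour these integration tests exercise can be reproduced with nothing but the standard library. The sketch below uses hypothetical temporary paths and plain pip instead of Salt's pip.install execution module; it only illustrates the "-r requirements2.txt" indirection that the tests write out.

import pathlib
import subprocess
import sys
import tempfile
import venv

with tempfile.TemporaryDirectory() as tmpdir:
    tmp = pathlib.Path(tmpdir)
    venv.create(tmp / "venv", with_pip=True)
    # requirements.txt pulls in requirements2.txt, which pins the real package.
    (tmp / "requirements.txt").write_text("-r requirements2.txt\n")
    (tmp / "requirements2.txt").write_text("pep8\n")
    pip_dir = "Scripts" if sys.platform == "win32" else "bin"
    pip = tmp / "venv" / pip_dir / "pip"
    subprocess.run(
        [str(pip), "install", "-r", "requirements.txt"], cwd=tmp, check=True
    )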
tests/pytests/functional/modules/test_saltcheck.py (new file, 69 lines)
@@ -0,0 +1,69 @@
import pytest


@pytest.fixture
def saltcheck(modules):
    return modules.saltcheck


@pytest.mark.slow_test
def test_saltcheck_render_pyobjects_state(state_tree, saltcheck):
    with pytest.helpers.temp_file("pyobj_touched.txt") as tpath:
        sls_content = f"""
        #!pyobjects

        File.touch("{tpath}")
        """
        tst_content = f"""
        is_stuff_there:
          module_and_function: file.file_exists
          args:
            - "{tpath}"
          assertion: assertTrue
        """
        with pytest.helpers.temp_file(
            "pyobj_touched/init.sls", sls_content, state_tree
        ), pytest.helpers.temp_file(
            "pyobj_touched/saltcheck-tests/init.tst", tst_content, state_tree
        ):
            ret = saltcheck.run_state_tests("pyobj_touched")
            assert ret[0]["pyobj_touched"]["is_stuff_there"]["status"] == "Pass"
            assert ret[1]["TEST RESULTS"]["Passed"] == 1
            assert ret[1]["TEST RESULTS"]["Missing Tests"] == 0
            assert ret[1]["TEST RESULTS"]["Failed"] == 0
            assert ret[1]["TEST RESULTS"]["Skipped"] == 0


@pytest.mark.slow_test
def test_saltcheck_allow_remote_fileclient(state_tree, saltcheck):
    sls_content = """
    test_state:
      test.show_notification:
        - text: The test state
    """

    tst_content = """
    test cp.cache_file:
      module_and_function: cp.cache_file
      args:
        - salt://sltchk_remote/download_me.txt
      kwargs:
        saltenv: base
      assertion: assertNotEmpty
      output_details: True
    """

    with pytest.helpers.temp_file(
        "sltchk_remote/init.sls", sls_content, state_tree
    ), pytest.helpers.temp_file(
        "sltchk_remote/saltcheck-tests/init.tst", tst_content, state_tree
    ), pytest.helpers.temp_file(
        "sltchk_remote/download_me.txt", "salty", state_tree
    ):

        ret = saltcheck.run_state_tests("sltchk_remote")
        assert ret[0]["sltchk_remote"]["test cp.cache_file"]["status"] == "Pass"
        assert ret[1]["TEST RESULTS"]["Passed"] == 1
        assert ret[1]["TEST RESULTS"]["Missing Tests"] == 0
        assert ret[1]["TEST RESULTS"]["Failed"] == 0
        assert ret[1]["TEST RESULTS"]["Skipped"] == 0
@@ -134,6 +134,7 @@ def test_exit_status_correct_usage(salt_cli, salt_minion):

@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support SIGINT")
+@pytest.mark.skip_initial_onedir_failure
def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion):
    """
    Ensure that a call to ``salt`` that is taking too long, when a user
@@ -106,6 +106,7 @@ def config(source_path):
    master_config_path = master_dir / "master"
    master_config_path.write_text(
        """
+       auth.pam.python: /usr/local/bin/python3
        order_masters: True

        publisher_acl:

@@ -136,6 +137,7 @@ nodegroups:
    syndic_a_master_config_path = syndic_a_dir / "master"
    syndic_a_master_config_path.write_text(
        """
+       auth.pam.python: /usr/local/bin/python3
        syndic_master: master
        publisher_acl:
          bob:
@@ -55,7 +55,7 @@ def salt_cli(salt_master):
    The ``salt`` CLI as a fixture against the running master
    """
    assert salt_master.is_running()
-   return salt_master.salt_cli()
+   return salt_master.salt_cli(timeout=30)


@pytest.fixture(scope="package")

@@ -64,7 +64,7 @@ def salt_call_cli(salt_minion):
    The ``salt-call`` CLI as a fixture against the running minion
    """
    assert salt_minion.is_running()
-   return salt_minion.salt_call_cli()
+   return salt_minion.salt_call_cli(timeout=30)


@pytest.fixture(scope="package")

@@ -73,7 +73,7 @@ def salt_cp_cli(salt_master):
    The ``salt-cp`` CLI as a fixture against the running master
    """
    assert salt_master.is_running()
-   return salt_master.salt_cp_cli()
+   return salt_master.salt_cp_cli(timeout=30)


@pytest.fixture(scope="package")

@@ -82,7 +82,7 @@ def salt_key_cli(salt_master):
    The ``salt-key`` CLI as a fixture against the running master
    """
    assert salt_master.is_running()
-   return salt_master.salt_key_cli()
+   return salt_master.salt_key_cli(timeout=30)


@pytest.fixture(scope="package")

@@ -91,7 +91,7 @@ def salt_run_cli(salt_master):
    The ``salt-run`` CLI as a fixture against the running master
    """
    assert salt_master.is_running()
-   return salt_master.salt_run_cli()
+   return salt_master.salt_run_cli(timeout=30)


@pytest.fixture(scope="module")
@@ -1,82 +0,0 @@ (previous saltcheck test module removed; superseded by the functional test above)
import pytest


@pytest.mark.slow_test
def test_saltcheck_render_pyobjects_state(salt_master, salt_call_cli):
    with pytest.helpers.temp_file("pyobj_touched.txt") as tpath:
        sls_content = """
        #!pyobjects

        File.touch("{}")
        """.format(
            tpath
        )

        tst_content = """
        is_stuff_there:
          module_and_function: file.file_exists
          args:
            - "{}"
          assertion: assertTrue
        """.format(
            tpath
        )

        with salt_master.state_tree.base.temp_file(
            "pyobj_touched/init.sls", sls_content
        ):
            with salt_master.state_tree.base.temp_file(
                "pyobj_touched/saltcheck-tests/init.tst", tst_content
            ):
                ret = salt_call_cli.run(
                    "--local",
                    "saltcheck.run_state_tests",
                    "pyobj_touched",
                )
                assert (
                    ret.data[0]["pyobj_touched"]["is_stuff_there"]["status"] == "Pass"
                )
                assert ret.data[1]["TEST RESULTS"]["Passed"] == 1
                assert ret.data[1]["TEST RESULTS"]["Missing Tests"] == 0
                assert ret.data[1]["TEST RESULTS"]["Failed"] == 0
                assert ret.data[1]["TEST RESULTS"]["Skipped"] == 0


@pytest.mark.slow_test
def test_saltcheck_allow_remote_fileclient(salt_master, salt_call_cli):
    sls_content = """
    test_state:
      test.show_notification:
        - text: The test state
    """

    tst_content = """
    test cp.cache_file:
      module_and_function: cp.cache_file
      args:
        - salt://sltchk_remote/download_me.txt
      kwargs:
        saltenv: base
      assertion: assertNotEmpty
      output_details: True
    """

    with salt_master.state_tree.base.temp_file("sltchk_remote/init.sls", sls_content):
        with salt_master.state_tree.base.temp_file(
            "sltchk_remote/saltcheck-tests/init.tst", tst_content
        ):
            with salt_master.state_tree.base.temp_file(
                "sltchk_remote/download_me.txt", "salty"
            ):
                ret = salt_call_cli.run(
                    "saltcheck.run_state_tests",
                    "sltchk_remote",
                )
                assert (
                    ret.data[0]["sltchk_remote"]["test cp.cache_file"]["status"]
                    == "Pass"
                )
                assert ret.data[1]["TEST RESULTS"]["Passed"] == 1
                assert ret.data[1]["TEST RESULTS"]["Missing Tests"] == 0
                assert ret.data[1]["TEST RESULTS"]["Failed"] == 0
                assert ret.data[1]["TEST RESULTS"]["Skipped"] == 0
@@ -514,7 +514,7 @@ def test_orchestrate_subset(
    with salt_master.state_tree.base.temp_file(
        "orch/subset.sls", sls_contents
    ), salt_master.state_tree.base.temp_file("test.sls", test_sls):
-       ret = salt_run_cli.run("state.orchestrate", "orch.subset")
+       ret = salt_run_cli.run("state.orchestrate", "orch.subset", _timeout=60)
        assert ret.returncode == 0

        for state_data in ret.data["data"][salt_master.id].values():
@@ -19,6 +19,7 @@ log = logging.getLogger(__name__)
pytestmark = [
    pytest.mark.slow_test,
    pytest.mark.windows_whitelisted,
+   pytest.mark.skip_initial_onedir_failure,
    pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False),
]


@@ -14,6 +14,7 @@ pytestmark = [
    pytest.mark.slow_test,
    pytest.mark.skip_on_windows,
    pytest.mark.skip_on_aix,
+   pytest.mark.skip_initial_onedir_failure,
    pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False),
]
@@ -29,9 +29,19 @@ def mock_pam():

def test_cve_if_pam_acct_mgmt_returns_nonzero_authenticate_should_be_false(mock_pam):
    with patch("salt.auth.pam.PAM_ACCT_MGMT", autospec=True, return_value=42):
-       assert salt.auth.pam.authenticate(username="fnord", password="fnord") is False
+       assert (
+           salt.auth.pam._authenticate(
+               username="fnord", password="fnord", service="login", encoding="utf-8"
+           )
+           is False
+       )


def test_if_pam_acct_mgmt_returns_zero_authenticate_should_be_true(mock_pam):
    with patch("salt.auth.pam.PAM_ACCT_MGMT", autospec=True, return_value=0):
-       assert salt.auth.pam.authenticate(username="fnord", password="fnord") is True
+       assert (
+           salt.auth.pam._authenticate(
+               username="fnord", password="fnord", service="login", encoding="utf-8"
+           )
+           is True
+       )
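The pattern in the updated PAM tests, patching the low-level account-management call and asserting the wrapper fails closed, works the same way against any module-level binding. A minimal, self-contained sketch follows; the pam_module object below is a hypothetical stand-in, not salt.auth.pam itself.

import types
from unittest.mock import patch

# Hypothetical stand-in for a module exposing a C-level PAM binding.
pam_module = types.SimpleNamespace(
    PAM_ACCT_MGMT=lambda handle, flags: 0,
)


def authenticate(username, password):
    # Fail closed unless account management returns success (0).
    return pam_module.PAM_ACCT_MGMT(None, 0) == 0


with patch.object(pam_module, "PAM_ACCT_MGMT", return_value=42):
    assert authenticate("fnord", "fnord") is False
with patch.object(pam_module, "PAM_ACCT_MGMT", return_value=0):
    assert authenticate("fnord", "fnord") is True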
@@ -156,7 +156,7 @@ def test_install_editable_without_egg_fails():

def test_install_multiple_editable():
    editables = [
-       "git+https://github.com/jek/blinker.git#egg=Blinker",
+       "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
        "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
    ]


@@ -192,7 +192,7 @@ def test_install_multiple_editable():
def test_install_multiple_pkgs_and_editables():
    pkgs = ["pep8", "salt"]
    editables = [
-       "git+https://github.com/jek/blinker.git#egg=Blinker",
+       "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr",
        "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
    ]
@@ -1601,15 +1601,26 @@ class VirtualEnv:
    venv_dir = attr.ib(converter=_cast_to_pathlib_path)
    env = attr.ib(default=None)
    system_site_packages = attr.ib(default=False)
-   pip_requirement = attr.ib(default="pip>=20.2.4,<21.2", repr=False)
-   setuptools_requirement = attr.ib(
-       default="setuptools!=50.*,!=51.*,!=52.*", repr=False
-   )
+   pip_requirement = attr.ib(repr=False)
+   setuptools_requirement = attr.ib(repr=False)
+   # TBD build_requirement = attr.ib(default="build!=0.6.*", repr=False)  # add build when implement pyproject.toml
    environ = attr.ib(init=False, repr=False)
    venv_python = attr.ib(init=False, repr=False)
    venv_bin_dir = attr.ib(init=False, repr=False)

+   @pip_requirement.default
+   def _default_pip_requiremnt(self):
+       if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
+           return "pip>=22.3.1,<23.0"
+       return "pip>=20.2.4,<21.2"
+
+   @setuptools_requirement.default
+   def _default_setuptools_requirement(self):
+       if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
+           # https://github.com/pypa/setuptools/commit/137ab9d684075f772c322f455b0dd1f992ddcd8f
+           return "setuptools>=65.6.3,<66"
+       return "setuptools!=50.*,!=51.*,!=52.*,<59"
+
    @venv_dir.default
    def _default_venv_dir(self):
        return pathlib.Path(tempfile.mkdtemp(dir=RUNTIME_VARS.TMP))
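The attrs idiom used here, declaring attr.ib(repr=False) and supplying the value through an @<attribute>.default method, defers the decision to instantiation time, so ONEDIR_TESTRUN is read when the virtualenv helper is created rather than at import. A minimal sketch with a hypothetical class name:

import os

import attr


@attr.s
class EnvPinned:
    # The default is resolved per instance by the decorated method below.
    pip_requirement = attr.ib(repr=False)

    @pip_requirement.default
    def _default_pip_requirement(self):
        if os.environ.get("ONEDIR_TESTRUN", "0") == "1":
            return "pip>=22.3.1,<23.0"
        return "pip>=20.2.4,<21.2"


os.environ["ONEDIR_TESTRUN"] = "1"
assert EnvPinned().pip_requirement == "pip>=22.3.1,<23.0"
os.environ["ONEDIR_TESTRUN"] = "0"
assert EnvPinned().pip_requirement == "pip>=20.2.4,<21.2"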
@@ -45,6 +45,7 @@ class SaltVirtMinionContainerFactory(SaltMinion):
                "NO_START_MINION": "1",
                "HOST_UUID": self.host_uuid,
                "PYTHONDONTWRITEBYTECODE": "1",
                "PYTHONPATH": str(CODE_DIR),
            }
        )
        super().__attrs_post_init__()
@@ -261,6 +261,7 @@ class VTTestCase(TestCase):
    def generate_multibyte_stderr_unicode(block_size):
        return b"\x2E" + VTTestCase.generate_multibyte_stdout_unicode(block_size)

+   @pytest.mark.skip_initial_onedir_failure
    @pytest.mark.skip_on_windows(reason="Skip VT tests on windows, due to issue 54290")
    @fixStdOutErrFileNoIfNeeded
    def test_split_multibyte_characters_unicode(self):

@@ -331,6 +332,7 @@ class VTTestCase(TestCase):
    def generate_multibyte_stderr_shiftjis(block_size):
        return b"\x2E" + VTTestCase.generate_multibyte_stdout_shiftjis(block_size)

+   @pytest.mark.skip_initial_onedir_failure
    @pytest.mark.skip_on_windows(reason="Skip VT tests on windows, due to issue 54290")
    @fixStdOutErrFileNoIfNeeded
    def test_split_multibyte_characters_shiftjis(self):
tools/pkg.py (63 changed lines)
@@ -5,6 +5,8 @@ These commands are used to build Salt packages.
from __future__ import annotations

+import fnmatch
+import hashlib
import json
import logging
import os
import pathlib

@@ -118,7 +120,7 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):

    When running on Windows and macOS, some additional cleanup is also done.
    """
-   with open(str(REPO_ROOT / "cicd" / "env-cleanup-files.yml")) as rfh:
+   with open(str(REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")) as rfh:
        patterns = yaml.safe_load(rfh.read())

    if pkg:

@@ -133,18 +135,19 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
    else:
        patterns = patterns["linux"]

+   def unnest_lists(patterns):
+       if isinstance(patterns, list):
+           for pattern in patterns:
+               yield from unnest_lists(pattern)
+       else:
+           yield patterns
+
    dir_patterns = set()
-   for pattern in patterns["dir_patterns"]:
-       if isinstance(pattern, list):
-           dir_patterns.update(set(pattern))
-           continue
+   for pattern in unnest_lists(patterns["dir_patterns"]):
        dir_patterns.add(pattern)

    file_patterns = set()
-   for pattern in patterns["file_patterns"]:
-       if isinstance(pattern, list):
-           file_patterns.update(set(pattern))
-           continue
+   for pattern in unnest_lists(patterns["file_patterns"]):
        file_patterns.add(pattern)

    for root, dirs, files in os.walk(cleanup_path, topdown=True, followlinks=False):
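The new unnest_lists() helper replaces the one-level isinstance(pattern, list) special-casing with a recursive generator, so arbitrarily nested pattern lists in the cleanup-rules YAML flatten the same way. A standalone sketch with made-up patterns:

def unnest_lists(patterns):
    # Yield leaf values from arbitrarily nested lists, depth first.
    if isinstance(patterns, list):
        for pattern in patterns:
            yield from unnest_lists(pattern)
    else:
        yield patterns


nested = ["*.pyc", ["__pycache__", ["*.dist-info", "*.egg-info"]], "site-packages/test*"]
flat = set(unnest_lists(nested))
assert flat == {
    "*.pyc",
    "__pycache__",
    "*.dist-info",
    "*.egg-info",
    "site-packages/test*",
}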
@@ -175,3 +178,45 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
                except FileNotFoundError:
                    pass
            break


@pkg.command(
    name="generate-hashes",
    arguments={
        "files": {
            "help": "The files to generate the hashes for.",
            "nargs": "*",
        },
    },
)
def generate_hashes(ctx: Context, files: list[pathlib.Path]):
    """
    Generate "blake2b", "sha512" and "sha3_512" hashes for the passed files.
    """
    for fpath in files:
        ctx.info(f"* Processing {fpath} ...")
        hashes = {}
        for hash_name in ("blake2b", "sha512", "sha3_512"):
            ctx.info(f"  * Calculating {hash_name} ...")
            with fpath.open("rb") as rfh:
                try:
                    digest = hashlib.file_digest(rfh, hash_name)  # type: ignore[attr-defined]
                except AttributeError:
                    # Python < 3.11
                    buf = bytearray(2**18)  # Reusable buffer to reduce allocations.
                    view = memoryview(buf)
                    digest = getattr(hashlib, hash_name)()
                    while True:
                        size = rfh.readinto(buf)
                        if size == 0:
                            break  # EOF
                        digest.update(view[:size])
            digest_file_path = fpath.parent / f"{fpath.name}.{hash_name.upper()}"
            hexdigest = digest.hexdigest()
            ctx.info(f"  * Writing {digest_file_path} ...")
            digest_file_path.write_text(digest.hexdigest())
            hashes[hash_name] = hexdigest
        hashes_json_path = fpath.parent / f"{fpath.name}.json"
        ctx.info(f"  * Writing {hashes_json_path} ...")
        hashes_json_path.write_text(json.dumps(hashes))
    ctx.info("Done")
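The generate-hashes command leans on hashlib.file_digest(), which only exists on Python 3.11+, and falls back to streaming the file through a reusable buffer on older interpreters. The same pattern in isolation (the sample file below is only for illustration):

import hashlib
import pathlib
import tempfile


def file_hexdigest(path: pathlib.Path, hash_name: str = "sha512") -> str:
    with path.open("rb") as rfh:
        try:
            digest = hashlib.file_digest(rfh, hash_name)  # Python >= 3.11
        except AttributeError:
            # Older interpreters: stream the file through a reusable buffer.
            digest = hashlib.new(hash_name)
            buf = bytearray(2**18)
            view = memoryview(buf)
            while True:
                size = rfh.readinto(buf)
                if size == 0:
                    break
                digest.update(view[:size])
    return digest.hexdigest()


with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"salt")
print(file_hexdigest(pathlib.Path(tmp.name)))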
tools/vm.py (28 changed lines)
@@ -244,6 +244,7 @@ def test(
    rerun_failures: bool = False,
    skip_requirements_install: bool = False,
    print_tests_selection: bool = False,
+   print_system_info: bool = False,
    skip_code_coverage: bool = False,
):
    """

@@ -252,6 +253,7 @@ def test(
    vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region)
    env = {
        "PRINT_TEST_PLAN_ONLY": "0",
+       "SKIP_INITIAL_ONEDIR_FAILURES": "1",
        "SKIP_INITIAL_GH_ACTIONS_FAILURES": "1",
    }
    if rerun_failures:

@@ -264,6 +266,10 @@ def test(
        env["SKIP_CODE_COVERAGE"] = "1"
    else:
        env["SKIP_CODE_COVERAGE"] = "0"
+   if print_system_info:
+       env["PRINT_SYSTEM_INFO"] = "1"
+   else:
+       env["PRINT_SYSTEM_INFO"] = "0"
    if (
        skip_requirements_install
        or os.environ.get("SKIP_REQUIREMENTS_INSTALL", "0") == "1"

@@ -976,11 +982,17 @@ class VM:
            "--exclude",
            ".pytest_cache/",
-           "--exclude",
-           "artifacts/",
            "--exclude",
            f"{STATE_DIR.relative_to(REPO_ROOT)}{os.path.sep}",
            "--exclude",
            "*.py~",
+           # We need to include artifacts/ to be able to include artifacts/salt
+           "--include",
+           "artifacts/",
+           "--include",
+           "artifacts/salt",
+           # But we also want to exclude all other entries under artifacts/
+           "--exclude",
+           "artifacts/*",
        ]
        if self.is_windows:
            # Symlinks aren't handled properly on windows, just replace the

@@ -1141,7 +1153,17 @@ class VM:
        source = f"{self.name}:{remote_path}/"
        destination = "artifacts/"
        description = f"Downloading {source} ..."
-       self.rsync(source, destination, description)
+       self.rsync(
+           source,
+           destination,
+           description,
+           [
+               "--exclude",
+               f"{remote_path}/artifacts/salt",
+               "--exclude",
+               f"{remote_path}/artifacts/salt-*.*",
+           ],
+       )

    def rsync(self, source, destination, description, rsync_flags: list[str] = None):
        """
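The rsync changes above rely on rule ordering: the first matching --include/--exclude wins, so "artifacts/" and "artifacts/salt" must be included before the catch-all "artifacts/*" exclude. A small sketch of assembling such a command; the host and paths are hypothetical, and the command is only printed here rather than executed.

import shlex

rsync_cmd = [
    "rsync", "-az",
    "--exclude", ".pytest_cache/",
    "--exclude", "*.py~",
    # Include the directory itself and the one child we care about ...
    "--include", "artifacts/",
    "--include", "artifacts/salt",
    # ... then drop everything else underneath artifacts/.
    "--exclude", "artifacts/*",
    "./", "buildbox:/tmp/salt-checkout/",
]
# Pass rsync_cmd to subprocess.run(..., check=True) against a reachable host.
print(shlex.join(rsync_cmd))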