Initial work for the Self Github Actions Runners

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Pedro Algarvio 2022-09-26 17:58:19 +01:00 committed by Megan Wilhite
parent 2c683d8fc0
commit 1f2c1fac9b
20 changed files with 2115 additions and 337 deletions


@@ -32,14 +32,18 @@ ignore_errors = True
[paths]
salt =
salt/
/tmp/testing/salt/
/tmp/kitchen/testing/salt/
/private/tmp/kitchen/testing/salt/
C:\Windows\Temp\testing\salt
C:\Users\admini~1\AppData\Local\Temp\kitchen\testing\salt\
C:\Users\Administrator\AppData\Local\Temp\kitchen\testing\salt\
tests =
tests/
/tmp/testing/tests/
/tmp/kitchen/testing/tests/
/private/tmp/kitchen/testing/tests/
C:\Windows\Temp\testing\tests
C:\Users\admini~1\AppData\Local\Temp\kitchen\testing\tests\
C:\Users\Administrator\AppData\Local\Temp\kitchen\testing\tests\
extension_modules =

152
.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,152 @@
name: CI
on:
- push
- pull_request
permissions:
contents: read
jobs:
get-changed-files:
name: Get Changed Files
runs-on: ubuntu-latest
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
outputs:
changed-files: ${{ toJSON(steps.changed-files.outputs) }}
steps:
- uses: actions/checkout@v3
- name: Get Changed Files
id: changed-files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
repo:
- added|modified:
- '**'
deleted:
- deleted:
- '**'
docs:
- doc/**
salt:
- added|modified:
- setup.py
- noxfile.py
- salt/**/*.py
- tasks/**/*.py
- tools/**/*.py
tests:
- added|modified:
- tests/**/*.py
pylintrc:
- added|modified:
- .pylintrc
pre-commit:
name: Pre-Commit
uses: ./.github/workflows/pre-commit-action.yml
needs:
- get-changed-files
with:
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
docs:
name: Build Docs
uses: ./.github/workflows/docs-action.yml
needs:
- get-changed-files
with:
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
lint:
name: Lint
uses: ./.github/workflows/lint-action.yml
needs:
- get-changed-files
with:
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
debian-11:
name: Debian 11
needs:
- get-changed-files
uses: ./.github/workflows/test-action.yml
with:
distro-slug: debian-11
nox-session: ci-test-3
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
debian-11-arm64:
name: Debian 11 ARM64
needs:
- get-changed-files
uses: ./.github/workflows/test-action.yml
with:
distro-slug: debian-11-arm64
nox-session: ci-test-3
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
ubuntu-2204:
name: Ubuntu 22.04
needs:
- get-changed-files
uses: ./.github/workflows/test-action.yml
with:
distro-slug: ubuntu-22.04
nox-session: ci-test-3
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
windows-2019:
name: Windows 2019
needs:
- get-changed-files
uses: ./.github/workflows/test-action.yml
with:
distro-slug: windows-2019
nox-session: ci-test-3
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
set-pipeline-exit-status:
# This job exists so that GitHub branch protection can require this single job
# to pass on a pull request, instead of requiring every individual job
name: Set the ${{ github.workflow }} Pipeline Exit Status
runs-on: ubuntu-latest
if: always()
needs:
- pre-commit
- docs
- lint
- debian-11
- debian-11-arm64
- ubuntu-2204
- windows-2019
steps:
- name: Download Exit Status Files
if: always()
uses: actions/download-artifact@v3
with:
name: exitstatus
path: exitstatus
- name: Delete Exit Status Artifacts
if: always()
uses: geekyeggo/delete-artifact@v1
with:
name: exitstatus
failOnError: false
- name: Set Pipeline Exit Status
run: |
tree exitstatus
grep -RE 'failure|cancelled' exitstatus/ && exit 1 || exit 0
- name: Done
if: always()
run:
echo "All workflows finished"
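
The `set-pipeline-exit-status` job above is the single aggregation point: every job in the reusable workflows writes its `job.status` into a file under `exitstatus/`, uploads it to the shared `exitstatus` artifact, and this job greps the downloaded files for failures. A minimal Python sketch of that same check, illustrative only (the workflow itself uses `grep -RE 'failure|cancelled'`):

# Illustrative re-statement of the "Set Pipeline Exit Status" step above.
# Each upstream job wrote its job.status ("success", "failure", "cancelled", ...)
# into a file below exitstatus/ and uploaded it to the shared artifact.
import pathlib
import sys


def pipeline_exit_status(exitstatus_dir: str = "exitstatus") -> int:
    for status_file in pathlib.Path(exitstatus_dir).rglob("*"):
        if not status_file.is_file():
            continue
        contents = status_file.read_text()
        if "failure" in contents or "cancelled" in contents:
            return 1
    return 0


if __name__ == "__main__":
    sys.exit(pipeline_exit_status())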

132
.github/workflows/docs-action.yml vendored Normal file

@@ -0,0 +1,132 @@
name: Docs
on:
workflow_call:
inputs:
changed-files:
required: true
type: string
description: JSON string containing information about changed files
jobs:
Salt:
name: Build Salt Documentation
runs-on: ubuntu-latest
if: ${{ fromJSON(inputs.changed-files)['docs'] == 'true' }}
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev xz-utils
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Nox
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- name: Install Python Requirements
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e 'docs-html(compress=False, clean=True)'
- name: Build Docs
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e 'docs-html(compress=False, clean=True)'
- name: Store Generated Documentation
uses: actions/upload-artifact@v3
with:
name: salt-html-docs
path: doc/_build/html
if-no-files-found: error
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-docs-html
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error
Manpages:
name: Build Salt man Pages
runs-on: ubuntu-latest
if: ${{ fromJSON(inputs.changed-files)['docs'] == 'true' }}
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Nox
if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true'
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- name: Install Python Requirements
if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true'
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e 'docs-man(compress=False, update=False, clean=True)'
- name: Build Manpages
if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e 'docs-man(compress=False, update=False, clean=True)'
- name: Store Generated Documentation
if: github.event_name == 'push' || fromJSON(inputs.changed-files)['docs'] == 'true'
uses: actions/upload-artifact@v3
with:
name: salt-man-pages
path: doc/_build/man
if-no-files-found: error
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-docs-man
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error
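
For reference, the `changed-files` input consumed by the `if:` conditions above is `toJSON(steps.changed-files.outputs)` from ci.yml, i.e. the serialized outputs of the `dorny/paths-filter` step. A hypothetical example of its shape (paths invented; the key names follow the filters defined in ci.yml, with a `<name>_files` entry per filter because of `list-files: json`):

# Hypothetical changed-files payload (illustration only, paths invented).
# All values are strings: 'true'/'false' flags plus JSON-encoded file lists
# that the workflows join into command-line arguments.
changed_files_example = {
    "docs": "true",
    "docs_files": '["doc/topics/releases/index.rst"]',
    "salt": "true",
    "salt_files": '["salt/modules/test.py", "noxfile.py"]',
    "tests": "false",
    "tests_files": "[]",
    "pylintrc": "false",
    "pylintrc_files": "[]",
    "repo": "true",
    "repo_files": '["salt/modules/test.py", "doc/topics/releases/index.rst", "noxfile.py"]',
    "deleted": "false",
    "deleted_files": "[]",
}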


@@ -1,125 +0,0 @@
name: Docs
on: [push, pull_request]
permissions:
contents: read
jobs:
Salt:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Build Salt Documentation
runs-on: ubuntu-latest
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev xz-utils
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Nox
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- id: changed-files
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
docs:
- doc/**
- name: Install Python Requirements
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e 'docs-html(compress=False, clean=True)'
- name: Build Docs
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e 'docs-html(compress=False, clean=True)'
- name: Store Generated Documentation
uses: actions/upload-artifact@v3
with:
name: salt-html-docs
path: doc/_build/html
Manpages:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Build Salt man Pages
runs-on: ubuntu-latest
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- id: changed-files
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
docs:
- doc/**
- name: Install Nox
if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true'
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- name: Install Python Requirements
if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true'
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e 'docs-man(compress=False, update=False, clean=True)'
- name: Build Manpages
if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e 'docs-man(compress=False, update=False, clean=True)'
- name: Store Generated Documentation
if: github.event_name == 'push' || steps.changed-files.outputs.docs == 'true'
uses: actions/upload-artifact@v3
with:
name: salt-man-pages
path: doc/_build/man

128
.github/workflows/lint-action.yml vendored Normal file

@@ -0,0 +1,128 @@
name: Lint
on:
workflow_call:
inputs:
changed-files:
required: true
type: string
description: JSON string containing information about changed files
jobs:
Salt:
name: Lint Salt's Source Code
runs-on: ubuntu-latest
if: ${{ fromJSON(inputs.changed-files)['salt'] == 'true' || fromJSON(inputs.changed-files)['pylintrc'] == 'true' }}
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Nox
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- name: Install Python Requirements
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e lint-salt
- name: Lint Changed Files
if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['salt'] == 'true' && fromJSON(inputs.changed-files)['pylintrc'] != 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-salt -- ${{ join(fromJSON(inputs.changed-files)['salt_files'], ' ') }}
- name: Lint ALL Files
if: github.event_name != 'pull_request' || fromJSON(inputs.changed-files)['pylintrc'] == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-salt
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-lint-salt
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error
Tests:
name: Lint Salt's Test Suite
runs-on: ubuntu-latest
if: ${{ fromJSON(inputs.changed-files)['tests'] == 'true' || fromJSON(inputs.changed-files)['pylintrc'] == 'true' }}
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Nox
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- name: Install Python Requirements
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e lint-tests
- name: Lint Changed Files
if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['tests'] == 'true' && fromJSON(inputs.changed-files)['pylintrc'] != 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-tests -- ${{ join(fromJSON(inputs.changed-files)['tests_files'], ' ') }}
- name: Lint ALL Files
if: github.event_name != 'pull_request' || fromJSON(inputs.changed-files)['pylintrc'] == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-tests
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-lint-tests
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error


@@ -1,134 +0,0 @@
name: Lint
on: [push, pull_request]
permissions:
contents: read
jobs:
Salt:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Lint Salt's Source Code
runs-on: ubuntu-latest
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Nox
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- id: changed-files
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
salt:
- added|modified:
- setup.py
- noxfile.py
- salt/**/*.py
- tasks/**/*.py
rcfile:
- added|modified:
- .pylintrc
- name: Install Python Requirements
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e lint-salt
- name: Lint Changed Files
if: github.event_name == 'pull_request' && steps.changed-files.outputs.salt == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-salt -- ${{ join(fromJSON(steps.changed-files.outputs.salt_files), ' ') }}
- name: Lint ALL Files
if: steps.changed-files.outputs.salt == 'true' || steps.changed-files.outputs.rcfile == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-salt
Tests:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Lint Salt's Test Suite
runs-on: ubuntu-latest
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Nox
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install nox
- id: changed-files
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
tests:
- added|modified:
- tests/**/*.py
rcfile:
- added|modified:
- .pylintrc
- name: Install Python Requirements
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run:
nox --install-only --forcecolor -e lint-tests
- name: Lint Changed Files
if: github.event_name == 'pull_request' && steps.changed-files.outputs.tests == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-tests -- ${{ join(fromJSON(steps.changed-files.outputs.tests_files), ' ') }}
- name: Lint ALL Files
if: steps.changed-files.outputs.tests == 'true' || steps.changed-files.outputs.rcfile == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e lint-tests

72
.github/workflows/pre-commit-action.yml vendored Normal file

@@ -0,0 +1,72 @@
name: Pre-Commit
on:
workflow_call:
inputs:
changed-files:
required: true
type: string
description: JSON string containing information about changed files
jobs:
Pre-Commit:
name: Run Pre-Commit Against Salt
runs-on: ubuntu-latest
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Pre-Commit
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install pre-commit
pre-commit install --install-hooks
- name: Check ALL Files On Branch
if: github.event_name != 'pull_request'
env:
SKIP: lint-salt,lint-tests,remove-import-headers,rstcheck
run: |
pre-commit run --show-diff-on-failure --color=always --all-files
- name: Check Changed Files On PR
if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] == 'true'
env:
SKIP: lint-salt,lint-tests
run: |
pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(inputs.changed-files)['repo_files'], ' ') }}
- name: Check Docs On Deleted Files
if: fromJSON(inputs.changed-files)['deleted'] == 'true'
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ join(fromJSON(inputs.changed-files)['deleted_files'], ' ') }}
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-pre-commit
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error


@@ -1,72 +0,0 @@
name: Pre-Commit
on: [push, pull_request]
permissions:
contents: read
jobs:
Pre-Commit:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Run Pre-Commit Against Salt
runs-on: ubuntu-latest
container:
image: python:3.8.6-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
- name: Install Pre-Commit
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
python -m pip install --upgrade pip
pip install pre-commit
pre-commit install --install-hooks
- id: changed-files
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
repo:
- added|modified:
- '**'
deleted:
- deleted:
- '**'
- name: Check ALL Files On Branch
if: github.event_name != 'pull_request'
env:
SKIP: lint-salt,lint-tests,remove-import-headers,rstcheck
run: |
pre-commit run --show-diff-on-failure --color=always --all-files
- name: Check Changed Files On PR
if: github.event_name == 'pull_request' && steps.changed-files.outputs.repo == 'true'
env:
SKIP: lint-salt,lint-tests
run: |
pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(steps.changed-files.outputs.repo_files), ' ') }}
- name: Check Docs On Deleted Files
if: steps.changed-files.outputs.deleted == 'true'
env:
PIP_EXTRA_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
run: |
pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ join(fromJSON(steps.changed-files.outputs.deleted_files), ' ') }}

291
.github/workflows/test-action.yml vendored Normal file

@@ -0,0 +1,291 @@
name: Test Artifact
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
nox-session:
required: true
type: string
description: The nox session to run
changed-files:
required: true
type: string
description: JSON string containing information about changed files
env:
NOX_VERSION: "2022.8.7"
COLUMNS: 160
jobs:
dependencies:
name: Setup Test Dependencies
runs-on:
- self-hosted
timeout-minutes: 90
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt') }}
# Check the existence of nox.{distro-slug}.tar.*.
# If it exists, the cache was hit and we don't have to regenerate it
- name: Check nox.${{ inputs.distro-slug }}.tar.* Exists
id: check-nox-tarball
uses: andstor/file-existence-action@v1
with:
files: "nox.${{ inputs.distro-slug }}.tar.*"
- name: PyPi Proxy
if: steps.check-nox-tarball.outputs.files_exists != 'true'
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
- name: Install Python Tools Scripts
if: steps.check-nox-tarball.outputs.files_exists != 'true'
run: |
python3 -m pip install -r requirements/static/ci/py3.10/tools.txt
- name: Start VM
if: steps.check-nox-tarball.outputs.files_exists != 'true'
id: spin-up-vm
run: |
tools vm create ${{ inputs.distro-slug }}
- name: Upload Checkout To VM
if: steps.check-nox-tarball.outputs.files_exists != 'true'
run: |
tools vm rsync ${{ inputs.distro-slug }}
- name: Install Dependencies
if: steps.check-nox-tarball.outputs.files_exists != 'true'
run: |
tools vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
- name: Compress .nox Directory
if: steps.check-nox-tarball.outputs.files_exists != 'true'
run: |
tools vm compress-dependencies ${{ inputs.distro-slug }}
- name: Download Compressed .nox Directory
if: steps.check-nox-tarball.outputs.files_exists != 'true'
run: |
tools vm download-dependencies ${{ inputs.distro-slug }}
- name: Destroy VM
if: always()
run: |
tools vm destroy ${{ inputs.distro-slug }} || true
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error
test:
name: Test
runs-on:
- self-hosted
timeout-minutes: 240 # 4 Hours
needs:
- dependencies
strategy:
fail-fast: false
matrix:
tests-chunk:
- unit
- functional
- integration
- scenarios
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
uses: actions/cache@v3
with:
path: nox.${{ inputs.distro-slug }}.tar.*
key: testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt') }}
- name: PyPi Proxy
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
- name: Install Python Tools Scripts
run: |
python3 -m pip install -r requirements/static/ci/py3.10/tools.txt
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
- name: Start VM
id: spin-up-vm
run: |
tools vm create ${{ inputs.distro-slug }}
- name: Upload Checkout To VM
run: |
tools vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
tools vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Run Tests
id: run-tests
run: |
tools vm test --print-tests-selection --skip-requirements-install \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
- name: Combine Coverage Reports
if: always() && steps.spin-up-vm.conclusion == 'success' && steps.run-tests.conclusion != 'cancelled' && steps.run-tests.conclusion != 'skipped'
run: |
tools vm combine-coverage ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.conclusion == 'success' && steps.run-tests.conclusion != 'cancelled' && steps.run-tests.conclusion != 'skipped'
run: |
tools vm download-artifacts ${{ inputs.distro-slug }}
tree -a artifacts
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.tests-chunk }}
- name: Destroy VM
if: always()
run: |
tools vm destroy ${{ inputs.distro-slug }} || true
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
uses: actions/upload-artifact@v3
with:
name: artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: artifacts
# - name: Publish Test Report
# uses: mikepenz/action-junit-report@v3
# if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
# with:
# check_name: Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }})
# report_paths: 'artifacts/xml-unittests-output/*.xml'
- name: Report Salt Code Coverage(${{ matrix.tests-chunk }})
if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
run: |
nox --force-color -e report-coverage -- salt
- name: Report Tests Code Coverage(${{ matrix.tests-chunk }})
if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
run: |
nox --force-color -e report-coverage -- tests
- name: Report Combined Code Coverage(${{ matrix.tests-chunk }})
if: always() && steps.download-artifacts-from-vm.conclusion == 'success'
run: |
nox --force-color -e report-coverage
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.tests-chunk }}-tests
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error
report:
name: Reports for ${{ inputs.distro-slug }}
runs-on: ubuntu-latest
needs: test
if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
steps:
- name: Checkout Source Code
uses: actions/checkout@v3
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v3
with:
name: artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: artifacts
- name: Show Test Run Artifacts
if: always() && steps.download-test-run-artifacts.conclusion == 'success'
run: |
tree -a artifacts
- name: Upload Code Coverage DB
if: always() && steps.download-test-run-artifacts.conclusion == 'success'
uses: actions/upload-artifact@v3
with:
name: code-coverage
path: artifacts/coverage
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
- name: Report Salt Code Coverage
run: |
nox --force-color -e report-coverage -- salt
- name: Report Tests Code Coverage
run: |
nox --force-color -e report-coverage -- tests
- name: Report Combined Code Coverage
run: |
nox --force-color -e report-coverage
- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
# always run even if the previous steps fail
if: always() && steps.download-test-run-artifacts.conclusion == 'success'
with:
check_name: Overall Test Results(${{ inputs.distro-slug }})
report_paths: 'artifacts/xml-unittests-output/*.xml'
- name: Set Exit Status
if: always()
run: |
mkdir exitstatus
echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-report
- name: Upload Exit Status
if: always()
uses: actions/upload-artifact@v3
with:
name: exitstatus
path: exitstatus
if-no-files-found: error
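
In the `dependencies` job above, the restored `nox.<distro-slug>.tar.*` tarball doubles as the cache-hit signal: when it exists after `actions/cache`, every VM provisioning step is skipped; otherwise a VM is created, the nox session's dependencies are installed, and the compressed `.nox` directory is downloaded so the cache can be populated. A rough sketch of that gate, illustrative only (in the workflow it is expressed through the per-step `if:` conditions):

# Illustrative sketch of the dependencies job's cache gating.
import glob


def dependencies_tarball_missing(distro_slug: str) -> bool:
    # actions/cache restores nox.<slug>.tar.* under a key built from the
    # distro slug, the nox session and hashFiles('requirements/**/*.txt');
    # if the tarball is already present, the VM provisioning steps are skipped.
    return not glob.glob(f"nox.{distro_slug}.tar.*")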

6
.gitignore vendored

@@ -121,3 +121,9 @@ requirements/static/*/py*/*.log
# Vim's default session file
Session.vim
# VM task state directory
.vms-state/
# Nox requirements archives
nox.*.tar.*


@@ -1027,6 +1027,30 @@ repos:
- requirements/static/ci/invoke.in
# <---- Invoke -----------------------------------------------------------------------------------------------------
# ----- Tools ---------------------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-tools-3.9-requirements
name: Linux CI Py3.9 Tools Requirements
files: ^requirements/static/ci/(tools\.in|py3.9/(tools|linux)\.txt)$
pass_filenames: false
args:
- -v
- --py-version=3.9
- --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt
- requirements/static/ci/tools.in
- id: pip-tools-compile
alias: compile-ci-tools-3.10-requirements
name: Linux CI Py3.10 Tools Requirements
files: ^requirements/static/ci/(tools\.in|py3.10/(tools|linux)\.txt)$
pass_filenames: false
args:
- -v
- --py-version=3.10
- --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt
- requirements/static/ci/tools.in
# <---- Tools -----------------------------------------------------------------------------------------------------
# ----- Local Hooks ----------------------------------------------------------------------------------------------->
- repo: local
hooks:
@@ -1245,6 +1269,20 @@ repos:
- jinja2==3.0.3
- msgpack==1.0.3
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.971
hooks:
- id: mypy
alias: mypy-tools
name: Run mypy against tools
files: ^tools/.*\.py$
#args: [--strict]
additional_dependencies:
- attrs
- rich
- types-attrs
- types-pyyaml
- repo: https://github.com/saltstack/mirrors-nox
rev: v2021.6.12
hooks:


@@ -694,7 +694,8 @@ allowed-3rd-party-modules=msgpack,
copy,
base64,
tempfile,
fnmatch
fnmatch,
ptscripts
[EXCEPTIONS]

13
cicd/images.yml Normal file

@@ -0,0 +1,13 @@
debian-11:
ami: ami-0446131071e8012bc
ssh_username: admin
debian-11-arm64:
ami: ami-00900a8ea49344c7d
ssh_username: admin
ubuntu-22.04:
ami: ami-0cfac30ab1d41cf6c
ssh_username: ubuntu
windows-2019:
ami: ami-0b4f5ddce805b3c17
ssh_username: Administrator
connect_timeout: 600


@@ -32,6 +32,16 @@ CI_RUN = (
or os.environ.get("DRONE") is not None
)
PIP_INSTALL_SILENT = CI_RUN is False
PRINT_TEST_SELECTION = os.environ.get("PRINT_TEST_SELECTION")
if PRINT_TEST_SELECTION is None:
PRINT_TEST_SELECTION = CI_RUN
else:
PRINT_TEST_SELECTION = PRINT_TEST_SELECTION == "1"
PRINT_SYSTEM_INFO = os.environ.get("PRINT_SYSTEM_INFO")
if PRINT_SYSTEM_INFO is None:
PRINT_SYSTEM_INFO = CI_RUN
else:
PRINT_SYSTEM_INFO = PRINT_SYSTEM_INFO == "1"
SKIP_REQUIREMENTS_INSTALL = os.environ.get("SKIP_REQUIREMENTS_INSTALL", "0") == "1"
EXTRA_REQUIREMENTS_INSTALL = os.environ.get("EXTRA_REQUIREMENTS_INSTALL")
COVERAGE_REQUIREMENT = os.environ.get("COVERAGE_REQUIREMENT")
@@ -407,10 +417,7 @@ def _run_with_coverage(session, *test_cmd, env=None):
def _report_coverage(session):
if SKIP_REQUIREMENTS_INSTALL is False:
session.install(
"--progress-bar=off", COVERAGE_REQUIREMENT, silent=PIP_INSTALL_SILENT
)
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
@@ -945,6 +952,7 @@ def _pytest(session, coverage, cmd_args):
"--show-capture=no",
"-ra",
"-s",
"-vv",
"--showlocals",
]
for arg in cmd_args:
@@ -954,7 +962,10 @@ def _pytest(session, coverage, cmd_args):
args.append("--log-file={}".format(RUNTESTS_LOGFILE))
args.extend(cmd_args)
if CI_RUN:
if PRINT_SYSTEM_INFO and "--sysinfo" not in args:
args.append("--sysinfo")
if PRINT_TEST_SELECTION:
# Print out the collected tests (enabled by default on CI runs).
# This shows the full list of tests that will run, in order, which, in case of a
# test suite hang, helps us pinpoint which test is hanging
@@ -978,11 +989,173 @@ def _pytest(session, coverage, cmd_args):
session.run("python", "-m", "pytest", *args, env=env)
def _ci_test(session, transport):
# Install requirements
_install_requirements(session, transport)
chunks = {
"unit": [
"tests/unit",
"tests/pytests/unit",
],
"functional": [
"tests/pytests/functional",
],
"scenarios": ["tests/pytests/scenarios"],
}
if not session.posargs:
chunk_cmd = []
junit_report_filename = "test-results"
runtests_log_filename = "runtests"
else:
chunk = session.posargs.pop(0)
if chunk in ["unit", "functional", "integration", "scenarios", "all"]:
if chunk == "all":
chunk_cmd = []
junit_report_filename = "test-results"
runtests_log_filename = "runtests"
elif chunk == "integration":
chunk_cmd = []
for values in chunks.values():
for value in values:
chunk_cmd.append(f"--ignore={value}")
junit_report_filename = f"test-results-{chunk}"
runtests_log_filename = f"runtests-{chunk}"
else:
chunk_cmd = chunks[chunk]
junit_report_filename = f"test-results-{chunk}"
runtests_log_filename = f"runtests-{chunk}"
if session.posargs:
if session.posargs[0] == "--":
session.posargs.pop(0)
chunk_cmd.extend(session.posargs)
else:
chunk_cmd = [chunk] + session.posargs
junit_report_filename = "test-results"
runtests_log_filename = "runtests"
rerun_failures = os.environ.get("RERUN_FAILURES", "0") == "1"
track_code_coverage = os.environ.get("CI_TRACK_COVERAGE", "1") == "1"
common_pytest_args = [
"--color=yes",
"--run-slow",
"--ssh-tests",
"--sys-stats",
"--run-destructive",
"--output-columns=120",
]
try:
pytest_args = (
common_pytest_args[:]
+ [
f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml",
f"--log-file=artifacts/logs/{runtests_log_filename}.log",
]
+ chunk_cmd
)
_pytest(session, track_code_coverage, pytest_args)
except CommandFailed:
if rerun_failures is False:
raise
# Don't print the system information, nor the test selection, on reruns
global PRINT_TEST_SELECTION
global PRINT_SYSTEM_INFO
PRINT_TEST_SELECTION = False
PRINT_SYSTEM_INFO = False
pytest_args = (
common_pytest_args[:]
+ [
"--lf",
f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}-rerun.xml",
f"--log-file=artifacts/logs/{runtests_log_filename}-rerun.log",
]
+ chunk_cmd
)
_pytest(session, track_code_coverage, pytest_args)
@nox.session(python=_PYTHON_VERSIONS, name="ci-test")
def ci_test(session):
_ci_test(session, "zeromq")
@nox.session(python=_PYTHON_VERSIONS, name="ci-test-tcp")
def ci_test_tcp(session):
_ci_test(session, "tcp")
@nox.session(python="3", name="report-coverage")
def report_coverage(session):
_report_coverage(session)
@nox.session(python=False, name="decompress-dependencies")
def decompress_dependencies(session):
if not session.posargs:
session.error(
"Please pass the distro-slug to run tests against. "
"Check cicd/images.yml for what's available."
)
distro_slug = session.posargs.pop(0)
if IS_WINDOWS:
nox_dependencies_tarball = f"nox.{distro_slug}.tar.gz"
else:
nox_dependencies_tarball = f"nox.{distro_slug}.tar.xz"
nox_dependencies_tarball_path = REPO_ROOT / nox_dependencies_tarball
if not nox_dependencies_tarball_path.exists():
session.error(
f"The {nox_dependencies_tarball} file "
"does not exist. Not decompressing anything."
)
session_run_always(session, "tar", "xpf", nox_dependencies_tarball)
nox_dependencies_tarball_path.unlink()
@nox.session(python=False, name="compress-dependencies")
def compress_dependencies(session):
if not session.posargs:
session.error(
"Please pass the distro-slug to run tests against. "
"Check cicd/images.yml for what's available."
)
distro_slug = session.posargs.pop(0)
if IS_WINDOWS:
nox_dependencies_tarball = f"nox.{distro_slug}.tar.gz"
else:
nox_dependencies_tarball = f"nox.{distro_slug}.tar.xz"
nox_dependencies_tarball_path = REPO_ROOT / nox_dependencies_tarball
if nox_dependencies_tarball_path.exists():
session_warn(
session, f"Found existing {nox_dependencies_tarball}. Deleting it."
)
nox_dependencies_tarball_path.unlink()
session_run_always(session, "tar", "-caf", nox_dependencies_tarball, ".nox")
@nox.session(python="3", name="combine-coverage")
def combine_coverage(session):
_install_coverage_requirement(session)
env = {
# The full path to the .coverage data file. Makes sure we always write
# them to the same directory
"COVERAGE_FILE": str(COVERAGE_FILE),
}
# Always combine and generate the XML coverage report
try:
session.run("coverage", "combine", env=env)
except CommandFailed:
# Sometimes some of the coverage files are corrupt, which would trigger a
# CommandFailed exception
pass
class Tee:
"""
Python class to mimic linux tee behaviour
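
As a usage illustration (not part of the diff; the `ci-test-3` session name and the chunk names come from the workflows and the `chunks` dict above), the new chunked session could be driven locally like this, with the two environment toggles read by `_ci_test`:

# Illustrative local invocation of the new chunked CI test session.
import os
import subprocess

env = dict(os.environ)
env["RERUN_FAILURES"] = "1"     # re-run failures once with pytest --lf on CommandFailed
env["CI_TRACK_COVERAGE"] = "0"  # skip coverage tracking for this local run

# "unit" maps to tests/unit and tests/pytests/unit; "integration" runs
# everything the other chunks exclude via --ignore flags.
subprocess.run(
    ["nox", "--force-color", "-e", "ci-test-3", "--", "unit"],
    env=env,
    check=True,
)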


@@ -0,0 +1,36 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.10/tools.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/tools.in
#
attrs==22.1.0
# via
# -r requirements/static/ci/tools.in
# python-tools-scripts
boto3==1.24.80
# via -r requirements/static/ci/tools.in
botocore==1.27.80
# via
# boto3
# s3transfer
commonmark==0.9.1
# via rich
jmespath==1.0.1
# via
# boto3
# botocore
pygments==2.13.0
# via rich
python-dateutil==2.8.2
# via botocore
python-tools-scripts==0.9.0rc3
# via -r requirements/static/ci/tools.in
rich==12.5.1
# via python-tools-scripts
s3transfer==0.6.0
# via boto3
six==1.16.0
# via python-dateutil
urllib3==1.26.12
# via botocore


@@ -0,0 +1,36 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --output-file=requirements/static/ci/py3.9/tools.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/tools.in
#
attrs==22.1.0
# via
# -r requirements/static/ci/tools.in
# python-tools-scripts
boto3==1.24.80
# via -r requirements/static/ci/tools.in
botocore==1.27.80
# via
# boto3
# s3transfer
commonmark==0.9.1
# via rich
jmespath==1.0.1
# via
# boto3
# botocore
pygments==2.13.0
# via rich
python-dateutil==2.8.2
# via botocore
python-tools-scripts==0.9.0rc3
# via -r requirements/static/ci/tools.in
rich==12.5.1
# via python-tools-scripts
s3transfer==0.6.0
# via boto3
six==1.16.0
# via python-dateutil
urllib3==1.26.12
# via botocore


@@ -0,0 +1,3 @@
python-tools-scripts >= 0.9.0rc3
attrs
boto3


@@ -1,3 +1,12 @@
[sdist]
owner = root
group = root
[mypy]
python_version = 3.9
show_error_codes = True
warn_return_any = True
warn_unused_configs = True
[mypy-tools.*]
ignore_missing_imports = True

6
tools/__init__.py Normal file

@@ -0,0 +1,6 @@
import logging
import tools.vm
for name in ("boto3", "botocore", "urllib3"):
logging.getLogger(name).setLevel(logging.INFO)

1009
tools/vm.py Normal file

File diff suppressed because it is too large