Mirror of https://github.com/saltstack/salt.git
Synced 2025-04-17 10:10:20 +00:00

Merge branch 'master' into metadata_azure

This commit is contained in commit 5e9b82c201.
49 changed files with 3302 additions and 1274 deletions
.github/actionlint.yaml (vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
self-hosted-runner:
  # Labels of self-hosted runner in array of string
  labels:
    - bastion
    - x86_64
    - arm64
    - aarch64
.github/actions/build-source-tarball/action.yml (vendored, 14 lines changed)

@@ -2,6 +2,10 @@
name: build-source-tarball
description: Build Source Tarball
inputs:
  salt-version:
    type: string
    required: true
    description: The Salt version to set prior to building the tarball.
  nox-version:
    required: false
    type: string

@@ -28,13 +32,13 @@ runs:
    - name: Download Release Changes
      uses: actions/download-artifact@v3
      with:
-       name: salt-${{ env.SALT_VERSION }}.diff
+       name: salt-${{ inputs.salt-version }}.diff

    - name: Apply release changes
      shell: bash
      run: |
-       git apply salt-${{ env.SALT_VERSION }}.diff
-       rm salt-${{ env.SALT_VERSION }}.diff
+       git apply salt-${{ inputs.salt-version }}.diff
+       rm salt-${{ inputs.salt-version }}.diff

    - name: Create Source Tarball
      shell: bash

@@ -44,12 +48,12 @@ runs:
    - name: Create Hash Files
      shell: bash
      run: |
-       tools pkg generate-hashes dist/salt-${{ env.SALT_VERSION }}.tar.gz
+       tools pkg generate-hashes dist/salt-${{ inputs.salt-version }}.tar.gz

    - name: Upload Source Tarball as an Artifact
      uses: actions/upload-artifact@v3
      with:
-       name: salt-${{ env.SALT_VERSION }}.tar.gz
+       name: salt-${{ inputs.salt-version }}.tar.gz
        path: dist/salt-*.tar.gz*
        retention-days: 7
        if-no-files-found: error
.github/actions/release-changes/action.yml (vendored, deleted, 70 lines)

@@ -1,70 +0,0 @@
---
name: release-changes
description: Create release changes diff
inputs:
  salt-version:
    type: string
    required: true
    description: The Salt version to set prior to creating the release changes

env:
  COLUMNS: 160

runs:
  using: composite

  steps:

    - name: Pip Install Tools Requirements
      shell: bash
      run: |
        pip3 install -r $(pwd)/requirements/static/ci/py3.10/tools.txt
        pip3 install -r $(pwd)/requirements/static/ci/py3.10/changelog.txt
        pip3 install -r $(pwd)/requirements/static/ci/py3.10/docs.txt

    - name: Set salt version
      shell: bash
      run: |
        echo '${{ inputs.salt-version }}' > salt/_version.txt

    - name: Update Debian changelog
      shell: bash
      run: |
        tools changelog update-deb --draft
        tools changelog update-deb

    - name: Update RPM changelog
      shell: bash
      run: |
        tools changelog update-rpm --draft
        tools changelog update-rpm

    - name: Update Release Notes
      shell: bash
      run: |
        tools changelog update-release-notes --draft
        tools changelog update-release-notes

    - name: Update CHANGELOG.md
      shell: bash
      run: |
        tools changelog update-release-notes --draft
        tools changelog update-release-notes

    - name: Update CHANGELOG.md
      shell: bash
      run: |
        tools docs man

    - name: Create release changes diff
      shell: bash
      run: |
        git diff --no-color > salt-${{ inputs.salt-version }}.diff

    - name: Upload Source Tarball as an Artifact
      uses: actions/upload-artifact@v3
      with:
        name: salt-${{ inputs.salt-version }}.diff
        path: salt-${{ inputs.salt-version }}.diff
        retention-days: 7
        if-no-files-found: error
.github/actions/setup-actionlint/action.yml (vendored, new file, 29 lines)

@@ -0,0 +1,29 @@
---
name: setup-actionlint
description: Setup actionlint
inputs:
  version:
    description: The version of actionlint
    default: v1.6.23

runs:
  using: composite
  steps:

    - name: Cache actionlint Binary
      uses: actions/cache@v3
      with:
        path: /usr/local/bin/actionlint
        key: ${{ runner.os }}-${{ runner.arch }}-actionlint-${{ inputs.version }}

    - name: Setup actionlint
      shell: bash
      run: |
        if ! command -v actionlint; then
          bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/${{ inputs.version }}/scripts/download-actionlint.bash)
          mv ./actionlint /usr/local/bin/actionlint
        fi
    - name: Show actionlint Version
      shell: bash
      run: |
        actionlint --version
.github/actions/setup-shellcheck/action.yml (vendored, new file, 31 lines)

@@ -0,0 +1,31 @@
---
name: setup-shellcheck
description: Setup shellcheck
inputs:
  version:
    description: The version of shellcheck
    default: v0.9.0

runs:
  using: composite
  steps:

    - name: Cache shellcheck Binary
      uses: actions/cache@v3
      with:
        path: /usr/local/bin/shellcheck
        key: ${{ runner.os }}-${{ runner.arch }}-shellcheck-${{ inputs.version }}

    - name: Setup shellcheck
      shell: bash
      run: |
        if ! command -v shellcheck; then
          wget https://github.com/koalaman/shellcheck/releases/download/${{ inputs.version }}/shellcheck-${{ inputs.version }}.${{ runner.os }}.x86_64.tar.xz
          tar xf shellcheck-${{ inputs.version }}.${{ runner.os }}.x86_64.tar.xz
          mv shellcheck-${{ inputs.version }}/shellcheck /usr/local/bin/shellcheck
          rm -rf shellcheck-${{ inputs.version }}.${{ runner.os }}.x86_64.tar.xz shellcheck-${{ inputs.version }}
        fi
    - name: Show shellcheck Version
      shell: bash
      run: |
        shellcheck --version
.github/workflows/build-deb-packages.yml (vendored, 4 lines changed)

@@ -43,10 +43,6 @@ jobs:
          apt update
          apt install -y python3 python3-venv python3-pip build-essential devscripts debhelper bash-completion git

-     - name: List CWD
-       run: |
-         ls -la $(cwd)
-
      - name: Download Release Changes
        uses: actions/download-artifact@v3
        with:
.github/workflows/build-rpm-packages.yml (vendored, 2 lines changed)

@@ -61,7 +61,7 @@ jobs:
          SALT_ONEDIR_ARCHIVE: "${{ github.workspace }}/artifacts/salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz"
        run: |
          echo "${{ inputs.salt-version }}" > salt/_version.txt
-         rpmbuild -bb --define="_salt_src $(pwd)" $(pwd)/pkg/rpm/salt.spec
+         rpmbuild -bb --define="_salt_src $(pwd)" "$(pwd)/pkg/rpm/salt.spec"

      - name: Upload RPMs
        uses: actions/upload-artifact@v3
.github/workflows/ci.yml (vendored, 80 lines changed)
@@ -1,28 +1,10 @@
+# Do not edit these workflows directly as the changes made will be overwritten.
+# Instead, edit the template '.github/workflows/templates/ci.yml.j2'
---
name: CI

on:
  push: {}
  pull_request: {}
-  schedule:
-    # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
-    - cron: '0 */8 * * *' # Run every 8 hours
-  workflow_dispatch:
-    inputs:
-      salt-version:
-        type: string
-        default: ""
-        description: >
-          The Salt version to set prior to running tests or building packages.
-          If not set, it is discover at run time, like, for example, capturing
-          the output of running `python3 salt/version.py`
-      testrun-type:
-        description: Type of test run
-        required: true
-        default: 'full'
-        type: choice
-        options:
-          - full
-          - changed

env:
  COLUMNS: 160
@@ -38,7 +20,7 @@ concurrency:
  # not cancel previous builds.
  # However, for every new build against the same pull request source branch,
  # all older builds against that same branch get canceled.
- group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
@@ -134,7 +116,7 @@ jobs:
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
-         salt-version: "${{ inputs.salt-version }}"
+         salt-version: ""

      - name: Write Changed Files To A Local File
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
@@ -219,40 +201,22 @@ jobs:
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

-  release-changes:
-    name: Create Release Diff
+  prepare-release:
+    name: Prepare Release
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Set up Python 3.10
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-
-      - name: Setup Salt Version
-        id: setup-salt-version
-        uses: ./.github/actions/setup-salt-version
-        with:
-          salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-
-      - name: Generate Changes Diff
-        uses: ./.github/actions/release-changes
-        with:
-          salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+    uses: ./.github/workflows/prepare-release.yml
+    with:
+      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

  build-source-tarball:
    name: Build Source Tarball
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
-      - release-changes
+      - prepare-release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
@@ -273,6 +237,8 @@ jobs:
      - name: Build Source Tarball
        uses: ./.github/actions/build-source-tarball
+       with:
+         salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

  build-deps-onedir:
    name: Build Dependencies Onedir
@@ -312,7 +278,6 @@ jobs:
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  windows-2016:
    name: Windows 2016
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
@@ -569,7 +534,7 @@ jobs:
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

-  photon-3:
+  photonos-3:
    name: Photon OS 3
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
@@ -585,7 +550,7 @@ jobs:
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

-  photon-4:
+  photonos-4:
    name: Photon OS 4
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
@@ -688,12 +653,17 @@ jobs:
    if: always()
    runs-on: ubuntu-latest
    needs:
      - prepare-workflow
      - pre-commit
      - docs
      - lint
      - build-deps-onedir
      - build-salt-onedir
      - build-pkgs
+     - windows-2016
+     - windows-2019
+     - windows-2022
+     - macos-12
      - almalinux-8
      - almalinux-9
      - amazonlinux-2
@@ -706,17 +676,13 @@ jobs:
      - debian-11-arm64
      - fedora-36
      - opensuse-15
-     - photon-3
-     - photon-4
+     - photonos-3
+     - photonos-4
      - ubuntu-1804
      - ubuntu-2004
      - ubuntu-2004-arm64
      - ubuntu-2204
      - ubuntu-2204-arm64
-     - windows-2016
-     - windows-2019
-     - windows-2022
-     - macos-12
    steps:
      - name: Get workflow information
        id: get-workflow-info
.github/workflows/pre-commit-action.yml (vendored, 8 lines changed)

@@ -18,7 +18,7 @@ jobs:
    runs-on: ubuntu-latest

    container:
-     image: python:3.8.6-slim-buster
+     image: python:3.10-slim-buster

    steps:

@@ -26,17 +26,19 @@ jobs:
      run: |
        echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
        apt-get update
-       apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
+       apt-get install -y wget curl enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
        apt-get install -y git/buster-backports

    - uses: actions/checkout@v3
+   - uses: ./.github/actions/setup-actionlint
+   - uses: ./.github/actions/setup-shellcheck

    - name: Install Pre-Commit
      env:
        PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
        PIP_EXTRA_INDEX_URL: https://pypi.org/simple
      run: |
-       pip install wheel pre-commit==${PRE_COMMIT_VERSION}
+       pip install wheel "pre-commit==${PRE_COMMIT_VERSION}"
        pre-commit install --install-hooks

    - name: Check ALL Files On Branch
.github/workflows/prepare-release.yml (vendored, new file, 98 lines)

@@ -0,0 +1,98 @@
name: Prepare Release

on:
  workflow_call:
    inputs:
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to creating the release diff.
      cache-seed:
        required: true
        type: string
        description: Seed used to invalidate caches
      python-version:
        required: false
        type: string
        default: "3.10"

env:
  COLUMNS: 160
  PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
  PIP_EXTRA_INDEX_URL: https://pypi.org/simple

jobs:
  build:
    name: Create Release Diff
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Set up Python ${{ inputs.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ inputs.python-version }}"

      - name: Cache Python Tools Virtualenvs
        id: nox-dependencies-cache
        uses: actions/cache@v3
        with:
          path: .tools-venvs/
          key: ${{ inputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }}

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ inputs.salt-version }}"

      - name: Update Debian changelog
        shell: bash
        run: |
          tools changelog update-deb --draft
          tools changelog update-deb

      - name: Update RPM changelog
        shell: bash
        run: |
          tools changelog update-rpm --draft
          tools changelog update-rpm

      - name: Update Release Notes
        shell: bash
        run: |
          tools changelog update-release-notes --draft
          tools changelog update-release-notes

      - name: Generate MAN Pages
        shell: bash
        run: |
          tools docs man

      - name: Update Changelog
        shell: bash
        run: |
          tools changelog update-changelog-md --draft
          tools changelog update-changelog-md

      - name: Show Changes Diff
        shell: bash
        run: |
          git diff --color

      - name: Create release changes diff
        shell: bash
        run: |
          git diff --no-color > salt-${{ inputs.salt-version }}.diff

      - name: Upload Source Tarball as an Artifact
        uses: actions/upload-artifact@v3
        with:
          name: salt-${{ inputs.salt-version }}.diff
          path: salt-${{ inputs.salt-version }}.diff
          retention-days: 7
          if-no-files-found: error
.github/workflows/scheduled.yml (vendored, new file, 701 lines)

@@ -0,0 +1,701 @@
# Do not edit these workflows directly as the changes made will be overwritten.
# Instead, edit the template '.github/workflows/templates/scheduled.yml.j2'
---
name: Scheduled

on:
  schedule:
    # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
    - cron: '0 */8 * * *' # Run every 8 hours

env:
  COLUMNS: 160
  CACHE_SEED: SEED-4 # Bump the number to invalidate all caches
  RELENV_DATA: "${{ github.workspace }}/.relenv"

permissions:
  contents: read # for dorny/paths-filter to fetch a list of changed files
  pull-requests: read # for dorny/paths-filter to read pull requests

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: false

jobs:
  prepare-workflow:
    name: Prepare Workflow Run
    runs-on: ubuntu-latest
    outputs:
      jobs: ${{ steps.define-jobs.outputs.jobs }}
      changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
      testrun: ${{ steps.define-testrun.outputs.testrun }}
      salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
      cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Full clone to also get the tags to get the right salt version

      - name: Get Changed Files
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        id: changed-files
        uses: dorny/paths-filter@v2
        with:
          token: ${{ github.token }}
          list-files: json
          filters: |
            repo:
              - added|modified:
                - '**'
            doc-requirements:
              - added|modified: &doc_requirements
                - requirements/static/ci/py3.*/docs.txt
            lint-requirements:
              - added|modified: &lint_requirements
                - requirements/static/ci/py3.*/lint.txt
            pkg_requirements:
              - added|modified: &pkg_requirements
                - requirements/static/pkg/py3.*/darwin.txt
                - requirements/static/pkg/py3.*/linux.txt
                - requirements/static/pkg/py3.*/freebsd.txt
                - requirements/static/pkg/py3.*/windows.txt
            test_requirements:
              - added|modified: &test_requirements
                - requirements/static/ci/py3.*/darwin.txt
                - requirements/static/ci/py3.*/linux.txt
                - requirements/static/ci/py3.*/freebsd.txt
                - requirements/static/ci/py3.*/windows.txt
                - requirements/static/ci/py3.*/darwin-crypto.txt
                - requirements/static/ci/py3.*/linux-crypto.txt
                - requirements/static/ci/py3.*/freebsd-crypto.txt
                - requirements/static/ci/py3.*/windows-crypto.txt
            deleted:
              - deleted:
                - '**'
            docs:
              - added|modified:
                - doc/**
                - *doc_requirements
            salt:
              - added|modified: &salt_added_modified
                - setup.py
                - noxfile.py
                - salt/**/*.py
                - tasks/**/*.py
                - tools/**/*.py
            tests:
              - added|modified: &tests_added_modified
                - tests/**/*.py
            lint:
              - added|modified:
                - .pylintrc
                - *lint_requirements
            golden_images:
              - added|modified:
                - cicd/golden-images.json
            testrun:
              - added|modified:
                - *salt_added_modified
                - *tests_added_modified

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Pretty Print The GH Actions Event
        run:
          tools ci print-gh-event

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: ""

      - name: Write Changed Files To A Local File
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        run:
          echo '${{ toJSON(steps.changed-files.outputs) }}' > changed-files.json

      - name: Check Local Changed Files Contents
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        run:
          cat changed-files.json

      - name: Process Changed Files
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        id: process-changed-files
        run:
          tools ci process-changed-files ${{ github.event_name }} changed-files.json

      - name: Check Collected Changed Files
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        run:
          echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'

      - name: Define Jobs To Run
        id: define-jobs
        run:
          tools ci define-jobs ${{ github.event_name }}

      - name: Check Collected Jobs
        run:
          echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'

      - name: Define Testrun
        id: define-testrun
        run:
          tools ci define-testrun ${{ github.event_name }} changed-files.json

      - name: Check Defined Test Run
        run:
          echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'

      - name: Check Contents of generated testrun-changed-files.txt
        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
        run:
          cat testrun-changed-files.txt || true

      - name: Upload testrun-changed-files.txt
        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
        uses: actions/upload-artifact@v3
        with:
          name: testrun-changed-files.txt
          path: testrun-changed-files.txt

      - name: Set Cache Seed Output
        id: set-cache-seed
        run: |
          echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

  pre-commit:
    name: Pre-Commit
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/pre-commit-action.yml
    needs:
      - prepare-workflow
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

  docs:
    name: Build Docs
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/docs-action.yml
    needs:
      - prepare-workflow
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

  lint:
    name: Lint
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/lint-action.yml
    needs:
      - prepare-workflow
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

  prepare-release:
    name: Prepare Release
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
    uses: ./.github/workflows/prepare-release.yml
    with:
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

  build-source-tarball:
    name: Build Source Tarball
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
      - prepare-release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

      - name: Build Source Tarball
        uses: ./.github/actions/build-source-tarball
        with:
          salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

  build-deps-onedir:
    name: Build Dependencies Onedir
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
    uses: ./.github/workflows/build-deps-onedir.yml
    with:
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  build-salt-onedir:
    name: Build Salt Onedir
    needs:
      - prepare-workflow
      - build-deps-onedir
      - build-source-tarball
    uses: ./.github/workflows/build-salt-onedir.yml
    with:
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  build-pkgs:
    name: Build Salt Packages
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/build-packages.yml
    with:
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  windows-2016:
    name: Windows 2016
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2016
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  windows-2019:
    name: Windows 2019
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2019
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  windows-2022:
    name: Windows 2022
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2022
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  macos-12:
    name: macOS 12
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action-macos.yml
    with:
      distro-slug: macos-12
      nox-session: ci-test-onedir
      platform: darwin
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  almalinux-8:
    name: Alma Linux 8
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: almalinux-8
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  almalinux-9:
    name: Alma Linux 9
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: almalinux-9
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  amazonlinux-2:
    name: Amazon Linux 2
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: amazonlinux-2
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  archlinux-lts:
    name: Arch Linux LTS
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: archlinux-lts
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  centos-7:
    name: CentOS 7
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centos-7
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  centosstream-8:
    name: CentOS Stream 8
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centosstream-8
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  centosstream-9:
    name: CentOS Stream 9
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centosstream-9
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  debian-10:
    name: Debian 10
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-10
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  debian-11:
    name: Debian 11
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-11
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  debian-11-arm64:
    name: Debian 11 Arm64
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-11-arm64
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  fedora-36:
    name: Fedora 36
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: fedora-36
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  opensuse-15:
    name: Opensuse 15
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: opensuse-15
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  photonos-3:
    name: Photon OS 3
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: photonos-3
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  photonos-4:
    name: Photon OS 4
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: photonos-4
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-1804:
    name: Ubuntu 18.04
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-18.04
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2004:
    name: Ubuntu 20.04
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-20.04
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2004-arm64:
    name: Ubuntu 20.04 Arm64
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-20.04-arm64
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2204:
    name: Ubuntu 22.04
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-22.04
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2204-arm64:
    name: Ubuntu 22.04 Arm64
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-22.04-arm64
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  set-pipeline-exit-status:
    # This step is just so we can make github require this step, to pass checks
    # on a pull request instead of requiring all
    name: Set the ${{ github.workflow }} Pipeline Exit Status
    if: always()
    runs-on: ubuntu-latest
    needs:
      - prepare-workflow
      - pre-commit
      - docs
      - lint
      - build-deps-onedir
      - build-salt-onedir
      - build-pkgs
      - windows-2016
      - windows-2019
      - windows-2022
      - macos-12
      - almalinux-8
      - almalinux-9
      - amazonlinux-2
      - archlinux-lts
      - centos-7
      - centosstream-8
      - centosstream-9
      - debian-10
      - debian-11
      - debian-11-arm64
      - fedora-36
      - opensuse-15
      - photonos-3
      - photonos-4
      - ubuntu-1804
      - ubuntu-2004
      - ubuntu-2004-arm64
      - ubuntu-2204
      - ubuntu-2204-arm64
    steps:
      - name: Get workflow information
        id: get-workflow-info
        uses: technote-space/workflow-conclusion-action@v3

      - name: Set Pipeline Exit Status
        shell: bash
        run: |
          if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
            exit 1
          else
            exit 0
          fi

      - name: Done
        if: always()
        run:
          echo "All worflows finished"
.github/workflows/templates/ci.yml.j2 (vendored, new file, 512 lines)

@@ -0,0 +1,512 @@
<%- extends 'layout.yml.j2' %>

<%- block on %>
on:
  push: {}
  pull_request: {}
<%- endblock on %>

<%- block jobs %>
  <{- super() }>

  pre-commit:
    <%- do conclusion_needs.append('pre-commit') %>
    name: Pre-Commit
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/pre-commit-action.yml
    needs:
      - prepare-workflow
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

  docs:
    <%- do conclusion_needs.append('docs') %>
    name: Build Docs
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/docs-action.yml
    needs:
      - prepare-workflow
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

  lint:
    <%- do conclusion_needs.append('lint') %>
    name: Lint
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    uses: ./.github/workflows/lint-action.yml
    needs:
      - prepare-workflow
    with:
      changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}

  prepare-release:
    name: Prepare Release
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
    uses: ./.github/workflows/prepare-release.yml
    with:
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

  build-source-tarball:
    name: Build Source Tarball
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
      - prepare-release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

      - name: Build Source Tarball
        uses: ./.github/actions/build-source-tarball
        with:
          salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"

  build-deps-onedir:
    <%- do conclusion_needs.append('build-deps-onedir') %>
    name: Build Dependencies Onedir
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
    uses: ./.github/workflows/build-deps-onedir.yml
    with:
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  build-salt-onedir:
    <%- do conclusion_needs.append('build-salt-onedir') %>
    name: Build Salt Onedir
    needs:
      - prepare-workflow
      - build-deps-onedir
      - build-source-tarball
    uses: ./.github/workflows/build-salt-onedir.yml
    with:
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  build-pkgs:
    <%- do conclusion_needs.append('build-pkgs') %>
    name: Build Salt Packages
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/build-packages.yml
    with:
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}

  windows-2016:
    <%- do conclusion_needs.append('windows-2016') %>
    name: Windows 2016
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2016
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  windows-2019:
    <%- do conclusion_needs.append('windows-2019') %>
    name: Windows 2019
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2019
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  windows-2022:
    <%- do conclusion_needs.append('windows-2022') %>
    name: Windows 2022
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: windows-2022
      nox-session: ci-test-onedir
      platform: windows
      arch: amd64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  macos-12:
    <%- do conclusion_needs.append('macos-12') %>
    name: macOS 12
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action-macos.yml
    with:
      distro-slug: macos-12
      nox-session: ci-test-onedir
      platform: darwin
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  almalinux-8:
    <%- do conclusion_needs.append('almalinux-8') %>
    name: Alma Linux 8
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: almalinux-8
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  almalinux-9:
    <%- do conclusion_needs.append('almalinux-9') %>
    name: Alma Linux 9
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: almalinux-9
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  amazonlinux-2:
    <%- do conclusion_needs.append('amazonlinux-2') %>
    name: Amazon Linux 2
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: amazonlinux-2
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  archlinux-lts:
    <%- do conclusion_needs.append('archlinux-lts') %>
    name: Arch Linux LTS
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: archlinux-lts
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  centos-7:
    <%- do conclusion_needs.append('centos-7') %>
    name: CentOS 7
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centos-7
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  centosstream-8:
    <%- do conclusion_needs.append('centosstream-8') %>
    name: CentOS Stream 8
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centosstream-8
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  centosstream-9:
    <%- do conclusion_needs.append('centosstream-9') %>
    name: CentOS Stream 9
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: centosstream-9
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  debian-10:
    <%- do conclusion_needs.append('debian-10') %>
    name: Debian 10
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-10
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  debian-11:
    <%- do conclusion_needs.append('debian-11') %>
    name: Debian 11
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-11
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  debian-11-arm64:
    <%- do conclusion_needs.append('debian-11-arm64') %>
    name: Debian 11 Arm64
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: debian-11-arm64
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  fedora-36:
    <%- do conclusion_needs.append('fedora-36') %>
    name: Fedora 36
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: fedora-36
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  opensuse-15:
    <%- do conclusion_needs.append('opensuse-15') %>
    name: Opensuse 15
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: opensuse-15
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  photonos-3:
    <%- do conclusion_needs.append('photonos-3') %>
    name: Photon OS 3
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: photonos-3
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  photonos-4:
    <%- do conclusion_needs.append('photonos-4') %>
    name: Photon OS 4
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: photonos-4
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-1804:
    <%- do conclusion_needs.append('ubuntu-1804') %>
    name: Ubuntu 18.04
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-18.04
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2004:
    <%- do conclusion_needs.append('ubuntu-2004') %>
    name: Ubuntu 20.04
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-20.04
      nox-session: ci-test-onedir
      platform: linux
      arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2004-arm64:
    <%- do conclusion_needs.append('ubuntu-2004-arm64') %>
    name: Ubuntu 20.04 Arm64
    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
    needs:
      - prepare-workflow
      - build-salt-onedir
    uses: ./.github/workflows/test-action.yml
    with:
      distro-slug: ubuntu-20.04-arm64
      nox-session: ci-test-onedir
      platform: linux
      arch: aarch64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}

  ubuntu-2204:
    <%- do conclusion_needs.append('ubuntu-2204') %>
    name: Ubuntu 22.04
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
uses: ./.github/workflows/test-action.yml
|
||||
with:
|
||||
distro-slug: ubuntu-22.04
|
||||
nox-session: ci-test-onedir
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||
|
||||
ubuntu-2204-arm64:
|
||||
<%- do conclusion_needs.append('ubuntu-2204-arm64') %>
|
||||
name: Ubuntu 22.04 Arm64
|
||||
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }}
|
||||
needs:
|
||||
- prepare-workflow
|
||||
- build-salt-onedir
|
||||
uses: ./.github/workflows/test-action.yml
|
||||
with:
|
||||
distro-slug: ubuntu-22.04-arm64
|
||||
nox-session: ci-test-onedir
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
|
||||
<%- endblock jobs %>
229 .github/workflows/templates/layout.yml.j2 vendored Normal file

@@ -0,0 +1,229 @@
# Do not edit these workflows directly as the changes made will be overwritten.
# Instead, edit the template '<{ template }>'
---
name: <{ workflow_name }>

<%- block on %>

on:
  push: {}
  pull_request: {}

<%- endblock on %>

<%- block env %>

env:
  COLUMNS: 160
  CACHE_SEED: SEED-4  # Bump the number to invalidate all caches
  RELENV_DATA: "${{ github.workspace }}/.relenv"

<%- endblock env %>

<%- block permissions %>

permissions:
  contents: read  # for dorny/paths-filter to fetch a list of changed files
  pull-requests: read  # for dorny/paths-filter to read pull requests

<%- endblock permissions %>

<%- block concurrency %>

concurrency:
  # Concurrency is defined in a way that concurrent builds against branches do
  # not cancel previous builds.
  # However, for every new build against the same pull request source branch,
  # all older builds against that same branch get canceled.
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

<%- endblock concurrency %>

<%- block jobs %>

jobs:

  <%- block prepare_workflow_job %>
  <%- do conclusion_needs.append('prepare-workflow') %>
  prepare-workflow:
    name: Prepare Workflow Run
    runs-on: ubuntu-latest
    outputs:
      jobs: ${{ steps.define-jobs.outputs.jobs }}
      changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
      testrun: ${{ steps.define-testrun.outputs.testrun }}
      salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
      cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0  # Full clone to also get the tags to get the right salt version

      - name: Get Changed Files
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        id: changed-files
        uses: dorny/paths-filter@v2
        with:
          token: ${{ github.token }}
          list-files: json
          filters: |
            repo:
              - added|modified:
                - '**'
            doc-requirements:
              - added|modified: &doc_requirements
                - requirements/static/ci/py3.*/docs.txt
            lint-requirements:
              - added|modified: &lint_requirements
                - requirements/static/ci/py3.*/lint.txt
            pkg_requirements:
              - added|modified: &pkg_requirements
                - requirements/static/pkg/py3.*/darwin.txt
                - requirements/static/pkg/py3.*/linux.txt
                - requirements/static/pkg/py3.*/freebsd.txt
                - requirements/static/pkg/py3.*/windows.txt
            test_requirements:
              - added|modified: &test_requirements
                - requirements/static/ci/py3.*/darwin.txt
                - requirements/static/ci/py3.*/linux.txt
                - requirements/static/ci/py3.*/freebsd.txt
                - requirements/static/ci/py3.*/windows.txt
                - requirements/static/ci/py3.*/darwin-crypto.txt
                - requirements/static/ci/py3.*/linux-crypto.txt
                - requirements/static/ci/py3.*/freebsd-crypto.txt
                - requirements/static/ci/py3.*/windows-crypto.txt
            deleted:
              - deleted:
                - '**'
            docs:
              - added|modified:
                - doc/**
                - *doc_requirements
            salt:
              - added|modified: &salt_added_modified
                - setup.py
                - noxfile.py
                - salt/**/*.py
                - tasks/**/*.py
                - tools/**/*.py
            tests:
              - added|modified: &tests_added_modified
                - tests/**/*.py
            lint:
              - added|modified:
                - .pylintrc
                - *lint_requirements
            golden_images:
              - added|modified:
                - cicd/golden-images.json
            testrun:
              - added|modified:
                - *salt_added_modified
                - *tests_added_modified

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Setup Python Tools Scripts
        uses: ./.github/actions/setup-python-tools-scripts

      - name: Pretty Print The GH Actions Event
        run:
          tools ci print-gh-event

      - name: Setup Salt Version
        id: setup-salt-version
        uses: ./.github/actions/setup-salt-version
        with:
          salt-version: ""

      - name: Write Changed Files To A Local File
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        run:
          echo '${{ toJSON(steps.changed-files.outputs) }}' > changed-files.json

      - name: Check Local Changed Files Contents
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        run:
          cat changed-files.json

      - name: Process Changed Files
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        id: process-changed-files
        run:
          tools ci process-changed-files ${{ github.event_name }} changed-files.json

      - name: Check Collected Changed Files
        if: ${{ github.event_name != 'schedule' && github.event_name != 'push'}}
        run:
          echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'

      - name: Define Jobs To Run
        id: define-jobs
        run:
          tools ci define-jobs ${{ github.event_name }}

      - name: Check Collected Jobs
        run:
          echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'

      - name: Define Testrun
        id: define-testrun
        run:
          tools ci define-testrun ${{ github.event_name }} changed-files.json

      - name: Check Defined Test Run
        run:
          echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.'

      - name: Check Contents of generated testrun-changed-files.txt
        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
        run:
          cat testrun-changed-files.txt || true

      - name: Upload testrun-changed-files.txt
        if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }}
        uses: actions/upload-artifact@v3
        with:
          name: testrun-changed-files.txt
          path: testrun-changed-files.txt

      - name: Set Cache Seed Output
        id: set-cache-seed
        run: |
          echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"
  <%- endblock prepare_workflow_job %>

<%- endblock jobs %>

  set-pipeline-exit-status:
    # This step is just so we can make github require this step, to pass checks
    # on a pull request instead of requiring all
    name: Set the ${{ github.workflow }} Pipeline Exit Status
    if: always()
    runs-on: ubuntu-latest
    needs:
      <%- for need in conclusion_needs %>
      - <{ need }>
      <%- endfor %>
    steps:
      - name: Get workflow information
        id: get-workflow-info
        uses: technote-space/workflow-conclusion-action@v3

      - name: Set Pipeline Exit Status
        shell: bash
        run: |
          if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
            exit 1
          else
            exit 0
          fi

      - name: Done
        if: always()
        run:
          echo "All workflows finished"
16 .github/workflows/templates/scheduled.yml.j2 vendored Normal file

@@ -0,0 +1,16 @@
<%- extends 'ci.yml.j2' %>

<%- block on %>

on:
  schedule:
    # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
    - cron: '0 */8 * * *'  # Run every 8 hours
<%- endblock %>

<%- block concurrency %>

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: false
<%- endblock %>
@@ -1,8 +1,38 @@
default_language_version:
  python: python3
  python: python3

exclude: ^(doc/_static/.*|doc/_themes/.*)$
repos:

  - repo: https://github.com/s0undt3ch/python-tools-scripts
    rev: "0.10.1"
    hooks:
      - id: tools
        alias: generate-workflows
        name: Generate GitHub Workflow Templates
        files: ^.github/workflows/templates/.*$
        pass_filenames: false
        args:
          - pre-commit
          - generate-workflows
        additional_dependencies:
          - boto3==1.21.46
          - pyyaml==6.0
          - jinja2==3.1.2
      - id: tools
        alias: actionlint
        name: Lint GitHub Actions Workflows
        files: "^.github/workflows/"
        types:
          - yaml
        args:
          - pre-commit
          - actionlint
        additional_dependencies:
          - boto3==1.21.46
          - pyyaml==6.0
          - jinja2==3.1.2

  - repo: https://github.com/saltstack/pip-tools-compile-impersonate
    rev: "4.6"
    hooks:

@@ -1126,7 +1156,7 @@ repos:
      - id: bandit
        alias: bandit-salt
        name: Run bandit against Salt
        args: [--silent, -lll, --skip, B701]
        args: [--silent, -lll, --skip, "B701,B324"]
        exclude: >
            (?x)^(
                templates/.*|

@@ -1140,7 +1170,7 @@ repos:
      - id: bandit
        alias: bandit-tests
        name: Run bandit against the test suite
        args: [--silent, -lll, --skip, B701]
        args: [--silent, -lll, --skip, "B701,B324"]
        files: ^tests/.*
        exclude: ^tests/minionswarm\.py
        additional_dependencies: ['importlib_metadata<5']
1 changelog/58202.fixed Normal file
@@ -0,0 +1 @@
Fix salt.modules.pip:is_installed doesn't handle locally installed packages

1 changelog/60557.fixed Normal file
@@ -0,0 +1 @@
Fix regression pip.installed does not pass env_vars when calling pip.list

1 changelog/61789.fixed Normal file
@@ -0,0 +1 @@
Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry

1 changelog/63590.fixed Normal file
@@ -0,0 +1 @@
When the shell is passed as powershell or pwsh, only wrap the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended.

1 changelog/63595.changed Normal file
@@ -0,0 +1 @@
Set enable_fqdns_grains to be False by default.
@@ -1,15 +0,0 @@
@Library('salt@master-1.11') _

if (env.CHANGE_ID) {
    // This is a PR
    pre_commit_skips = ''
} else {
    // This is a branch build
    pre_commit_skips = ''
}

runPreCommit(
    env: env,
    pre_commit_skips: pre_commit_skips)

// vim: ft=groovy
@@ -2134,6 +2134,11 @@ worker_threads value.
Worker threads should not be put below 3 when using the peer system, but can
drop down to 1 worker otherwise.

Standards for busy environments:

* Use one worker thread per 200 minions.
* The value of worker_threads should not exceed 1½ times the available CPU cores.

.. note::
    When the master daemon starts, it is expected behaviour to see
    multiple salt-master processes, even if 'worker_threads' is set to '1'. At
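Editor's note: applied together, the two standards above bound the thread count from both directions. A quick sketch of the arithmetic (the helper name and the example figures are illustrative, not from the docs):

    import math

    def suggested_worker_threads(minions, cpu_cores):
        """Apply the documented sizing standards for busy environments."""
        by_minions = math.ceil(minions / 200)    # one worker thread per 200 minions
        ceiling = math.floor(cpu_cores * 1.5)    # should not exceed 1.5x the CPU cores
        # Not below 3 when the peer system is in use:
        return max(3, min(by_minions, ceiling))

    print(suggested_worker_threads(1000, 8))     # -> 5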
@@ -28,6 +28,8 @@ accept minions based on their ``uuid`` create a file named ``/etc/salt/autosign_
8f7d68e2-30c5-40c6-b84a-df7e978a03ee
1d3c5473-1fbc-479e-b0c7-877705a0730f

If already running, the master must be restarted for these config changes to take effect.

The master is now set up to accept minions with either of the two specified uuids.
Multiple values must always be written into separate lines.
Lines starting with a ``#`` are ignored.
@@ -258,7 +258,6 @@ pkg:
  - *ci_windows_file_patterns
  - "**/Scripts/py.exe"
  - "**/Scripts/pyw.exe"
  - "**/Scripts/pythonw.exe"
  - "**/Scripts/venvlauncher.exe"
  - "**/Scripts/venvwlauncher.exe"
  - "**/Scripts/wheel*"
@@ -77,13 +77,65 @@ fi
# This is a special tool to make it easier for the user to get started setting
# up salt
log "Symlink: Creating symlink for salt-config..."
ln -sf "$BIN_DIR/salt-config.sh" "$SBIN_DIR/salt-config"
ln -sf "$INSTALL_DIR/salt-config.sh" "$SBIN_DIR/salt-config"
if [ -f "$SBIN_DIR/salt-config" ]; then
    log "Symlink: Created Successfully"
else
    log "Symlink: Failed to create symlink"
fi

log "Symlink: Creating symlinks for salt..."
ln -sf "$INSTALL_DIR/salt" "$SBIN_DIR/salt"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-api..."
ln -sf "$INSTALL_DIR/salt-api" "$SBIN_DIR/salt-api"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-call..."
ln -sf "$INSTALL_DIR/salt-call" "$SBIN_DIR/salt-call"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-cloud..."
ln -sf "$INSTALL_DIR/salt-cloud" "$SBIN_DIR/salt-cloud"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-cp..."
ln -sf "$INSTALL_DIR/salt-cp" "$SBIN_DIR/salt-cp"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-key..."
ln -sf "$INSTALL_DIR/salt-key" "$SBIN_DIR/salt-key"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-master..."
ln -sf "$INSTALL_DIR/salt-master" "$SBIN_DIR/salt-master"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-minion..."
ln -sf "$INSTALL_DIR/salt-minion" "$SBIN_DIR/salt-minion"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-proxy..."
ln -sf "$INSTALL_DIR/salt-proxy" "$SBIN_DIR/salt-proxy"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-run..."
ln -sf "$INSTALL_DIR/salt-run" "$SBIN_DIR/salt-run"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for spm..."
ln -sf "$INSTALL_DIR/spm" "$SBIN_DIR/spm"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-ssh..."
ln -sf "$INSTALL_DIR/salt-ssh" "$SBIN_DIR/salt-ssh"
log "Symlink: Created Successfully"

log "Symlink: Creating symlinks for salt-syndic..."
ln -sf "$INSTALL_DIR/salt-syndic" "$SBIN_DIR/salt-syndic"
log "Symlink: Created Successfully"

#-------------------------------------------------------------------------------
# Add salt to paths.d
#-------------------------------------------------------------------------------
@@ -193,7 +193,6 @@ if ( $PKG ) {
$binaries = @(
    "py.exe",
    "pyw.exe",
    "pythonw.exe",
    "venvlauncher.exe",
    "venvwlauncher.exe"
)
@@ -16,15 +16,19 @@ botocore==1.24.46
    #   s3transfer
commonmark==0.9.1
    # via rich
jinja2==3.1.2
    # via -r requirements/static/ci/tools.in
jmespath==1.0.1
    # via
    #   boto3
    #   botocore
markupsafe==2.1.2
    # via jinja2
pygments==2.13.0
    # via rich
python-dateutil==2.8.2
    # via botocore
python-tools-scripts==0.9.7
python-tools-scripts==0.10.1
    # via -r requirements/static/ci/tools.in
pyyaml==6.0
    # via -r requirements/static/ci/tools.in

@@ -34,5 +38,7 @@ s3transfer==0.5.2
    # via boto3
six==1.16.0
    # via python-dateutil
typing-extensions==4.4.0
    # via python-tools-scripts
urllib3==1.26.12
    # via botocore

@@ -16,15 +16,19 @@ botocore==1.24.46
    #   s3transfer
commonmark==0.9.1
    # via rich
jinja2==3.1.2
    # via -r requirements/static/ci/tools.in
jmespath==1.0.1
    # via
    #   boto3
    #   botocore
markupsafe==2.1.2
    # via jinja2
pygments==2.13.0
    # via rich
python-dateutil==2.8.2
    # via botocore
python-tools-scripts==0.9.7
python-tools-scripts==0.10.1
    # via -r requirements/static/ci/tools.in
pyyaml==6.0
    # via -r requirements/static/ci/tools.in

@@ -34,5 +38,7 @@ s3transfer==0.5.2
    # via boto3
six==1.16.0
    # via python-dateutil
typing-extensions==4.4.0
    # via python-tools-scripts
urllib3==1.26.12
    # via botocore

@@ -1,4 +1,5 @@
python-tools-scripts >= 0.9.7
python-tools-scripts >= 0.10.1
attrs
boto3
pyyaml
jinja2
@@ -67,7 +67,7 @@ elif salt.utils.platform.is_proxy():
    _MASTER_USER = salt.utils.user.get_user()
else:
    _DFLT_IPC_MODE = "ipc"
    _DFLT_FQDNS_GRAINS = True
    _DFLT_FQDNS_GRAINS = False
    _MASTER_TRIES = 1
    _MASTER_USER = salt.utils.user.get_user()
@@ -1,7 +1,15 @@
"""
Generate baseline proxy minion grains for ESXi hosts.

.. versionadded:: 2015.8.4
.. Warning::
    This module will be deprecated in a future release of Salt. VMware strongly
    recommends using the
    `VMware Salt extensions <https://docs.saltproject.io/salt/extensions/salt-ext-modules-vmware/en/latest/all.html>`_
    instead of the ESXi module. Because the Salt extensions are newer and
    actively supported by VMware, they are more compatible with current versions
    of ESXi and they work well with the latest features in the VMware product
    line.

"""
@@ -220,22 +220,7 @@ def del_password(name):
def set_password(name, password):
    """
    Set the password for a named user. The password must be a properly defined
    hash. The password hash can be generated with this command:

    ``python -c "import crypt; print crypt.crypt('password', ciphersalt)"``

    .. note::
        When constructing the ``ciphersalt`` string, you must escape any dollar
        signs, to avoid them being interpolated by the shell.

    ``'password'`` is, of course, the password for which you want to generate
    a hash.

    ``ciphersalt`` is a combination of a cipher identifier, an optional number
    of rounds, and the cryptographic salt. The arrangement and format of these
    fields depends on the cipher and which flavor of BSD you are using. For
    more information on this, see the manpage for ``crypt(3)``. On NetBSD,
    additional information is available in ``passwd.conf(5)``.
    hash. A password hash can be generated with :py:func:`gen_password`.

    It is important to make sure that a supported cipher is used.
@@ -256,6 +256,34 @@ def _check_avail(cmd):
    return bret and wret


def _prep_powershell_cmd(shell, cmd, stack, encoded_cmd):
    """
    Prep cmd when shell is powershell
    """

    # If this is running on Windows wrap
    # the shell in quotes in case there are
    # spaces in the paths.
    if salt.utils.platform.is_windows():
        shell = '"{}"'.format(shell)

    # extract_stack() returns a list of tuples.
    # The last item in the list [-1] is the current method.
    # The third item[2] in each tuple is the name of that method.
    if stack[-2][2] == "script":
        cmd = (
            "{} -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command {}".format(
                shell, cmd
            )
        )
    elif encoded_cmd:
        cmd = "{} -NonInteractive -NoProfile -EncodedCommand {}".format(shell, cmd)
    else:
        cmd = '{} -NonInteractive -NoProfile -Command "{}"'.format(shell, cmd)

    return cmd


def _run(
    cmd,
    cwd=None,

@@ -368,19 +396,7 @@ def _run(
        # Else just run a Powershell command.
        stack = traceback.extract_stack(limit=2)

        # extract_stack() returns a list of tuples.
        # The last item in the list [-1] is the current method.
        # The third item[2] in each tuple is the name of that method.
        if stack[-2][2] == "script":
            cmd = '"{}" -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command {}'.format(
                shell, cmd
            )
        elif encoded_cmd:
            cmd = '"{}" -NonInteractive -NoProfile -EncodedCommand {}'.format(
                shell, cmd
            )
        else:
            cmd = '"{}" -NonInteractive -NoProfile -Command "{}"'.format(shell, cmd)
        cmd = _prep_powershell_cmd(shell, cmd, stack, encoded_cmd)

    # munge the cmd and cwd through the template
    (cmd, cwd) = _render_cmd(cmd, cwd, template, saltenv, pillarenv, pillar_override)
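Editor's note: for a feel of what the new helper produces, a small standalone re-implementation of the same branching (the real function consults salt.utils.platform and a traceback stack instead of plain arguments). The expected strings match the unit tests added further down:

    def prep_powershell_cmd(shell, cmd, caller, encoded_cmd, is_windows):
        """Mirror of the branching above: quote the shell only on Windows."""
        if is_windows:
            shell = '"{}"'.format(shell)
        if caller == "script":
            return "{} -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command {}".format(shell, cmd)
        if encoded_cmd:
            return "{} -NonInteractive -NoProfile -EncodedCommand {}".format(shell, cmd)
        return '{} -NonInteractive -NoProfile -Command "{}"'.format(shell, cmd)

    # On Linux the shell stays unquoted, matching changelog/63590.fixed:
    print(prep_powershell_cmd("powershell", "$PSVersionTable", "", False, False))
    # powershell -NonInteractive -NoProfile -Command "$PSVersionTable"

    # On Windows the shell is quoted in case its path contains spaces:
    print(prep_powershell_cmd("powershell", "$PSVersionTable", "", False, True))
    # "powershell" -NonInteractive -NoProfile -Command "$PSVersionTable"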
@@ -2,7 +2,15 @@
Glues the VMware vSphere Execution Module to the VMware ESXi Proxy Minions to the
:mod:`esxi proxymodule <salt.proxy.esxi>`.

.. versionadded:: 2015.8.4
.. Warning::
    This module will be deprecated in a future release of Salt. VMware strongly
    recommends using the
    `VMware Salt extensions <https://docs.saltproject.io/salt/extensions/salt-ext-modules-vmware/en/latest/all.html>`_
    instead of the ESXi module. Because the Salt extensions are newer and
    actively supported by VMware, they are more compatible with current versions
    of ESXi and they work well with the latest features in the VMware product
    line.


Depends: :mod:`vSphere Remote Execution Module (salt.modules.vsphere)
<salt.modules.vsphere>`
|
@ -339,16 +339,7 @@ def unlock_password(name, root=None):
|
|||
def set_password(name, password, use_usermod=False, root=None):
|
||||
"""
|
||||
Set the password for a named user. The password must be a properly defined
|
||||
hash. The password hash can be generated with this command:
|
||||
|
||||
``python -c "import crypt; print crypt.crypt('password',
|
||||
'\\$6\\$SALTsalt')"``
|
||||
|
||||
``SALTsalt`` is the 8-character crpytographic salt. Valid characters in the
|
||||
salt are ``.``, ``/``, and any alphanumeric character.
|
||||
|
||||
Keep in mind that the $6 represents a sha512 hash, if your OS is using a
|
||||
different hashing algorithm this needs to be changed accordingly
|
||||
hash. A password hash can be generated with :py:func:`gen_password`.
|
||||
|
||||
name
|
||||
User to set the password
|
||||
|
|
|
@@ -1233,8 +1233,12 @@ def freeze(bin_env=None, user=None, cwd=None, use_vt=False, env_vars=None, **kwa
    return result["stdout"].splitlines()


def list_(prefix=None, bin_env=None, user=None, cwd=None, env_vars=None, **kwargs):
def list_freeze_parse(
    prefix=None, bin_env=None, user=None, cwd=None, env_vars=None, **kwargs
):
    """
    .. versionadded:: 3006.0

    Filter list of installed apps from ``freeze`` and check to see if
    ``prefix`` exists in the list of packages installed.

@@ -1250,7 +1254,7 @@ def list_(prefix=None, bin_env=None, user=None, cwd=None, env_vars=None, **kwarg

    .. code-block:: bash

        salt '*' pip.list salt
        salt '*' pip.list_freeze_parse salt
    """

    cwd = _pip_bin_env(cwd, bin_env)

@@ -1299,6 +1303,73 @@ def list_(prefix=None, bin_env=None, user=None, cwd=None, env_vars=None, **kwarg
    return packages


def list_(prefix=None, bin_env=None, user=None, cwd=None, env_vars=None, **kwargs):
    """
    .. versionchanged:: 3006.0

    Output list of installed apps from ``pip list`` in JSON format and check to
    see if ``prefix`` exists in the list of packages installed.

    .. note::

        If the version of pip available is older than 9.0.0, parsing the
        ``freeze`` function output will be used to determine the name and
        version of installed modules.

    CLI Example:

    .. code-block:: bash

        salt '*' pip.list salt
    """

    packages = {}
    cwd = _pip_bin_env(cwd, bin_env)
    cur_version = version(bin_env, cwd, user=user)

    # Pip started supporting the ability to output json starting with 9.0.0
    min_version = "9.0"
    if salt.utils.versions.compare(ver1=cur_version, oper="<", ver2=min_version):
        return list_freeze_parse(
            prefix=prefix,
            bin_env=bin_env,
            user=user,
            cwd=cwd,
            env_vars=env_vars,
            **kwargs
        )

    cmd = _get_pip_bin(bin_env)
    cmd.extend(["list", "--format=json"])

    cmd_kwargs = dict(cwd=cwd, runas=user, python_shell=False)
    if kwargs:
        cmd_kwargs.update(**kwargs)
    if bin_env and os.path.isdir(bin_env):
        cmd_kwargs["env"] = {"VIRTUAL_ENV": bin_env}
    if env_vars:
        cmd_kwargs.setdefault("env", {}).update(_format_env_vars(env_vars))

    result = __salt__["cmd.run_all"](cmd, **cmd_kwargs)

    if result["retcode"]:
        raise CommandExecutionError(result["stderr"], info=result)

    try:
        pkgs = salt.utils.json.loads(result["stdout"], strict=False)
    except ValueError:
        raise CommandExecutionError("Invalid JSON", info=result)

    for pkg in pkgs:
        if prefix:
            if pkg["name"].lower().startswith(prefix.lower()):
                packages[pkg["name"]] = pkg["version"]
        else:
            packages[pkg["name"]] = pkg["version"]

    return packages


def version(bin_env=None, cwd=None, user=None):
    """
    .. versionadded:: 0.17.0
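Editor's note: outside of Salt, the same pattern boils down to one subprocess call plus JSON parsing. A minimal sketch (plain subprocess/json against the current interpreter's pip, no Salt wiring, and no pip-version fallback):

    import json
    import subprocess
    import sys

    def pip_list(prefix=None):
        """List installed packages via `pip list --format=json` (pip >= 9.0)."""
        out = subprocess.run(
            [sys.executable, "-m", "pip", "list", "--format=json"],
            check=True, capture_output=True, text=True,
        ).stdout
        pkgs = json.loads(out)
        if prefix:
            # Same filter as above: case-insensitive startswith on the name.
            pkgs = [p for p in pkgs if p["name"].lower().startswith(prefix.lower())]
        return {p["name"]: p["version"] for p in pkgs}

    print(pip_list(prefix="pip"))  # e.g. {'pip': '22.3.1'}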
@@ -1421,19 +1492,13 @@ def list_upgrades(bin_env=None, user=None, cwd=None):
    return packages


def is_installed(pkgname=None, bin_env=None, user=None, cwd=None):
def is_installed(pkgname, bin_env=None, user=None, cwd=None):
    """
    .. versionadded:: 2018.3.0
    .. versionchanged:: 3006.0

    Filter list of installed apps from ``freeze`` and return True or False if
    ``pkgname`` exists in the list of packages installed.

    .. note::
        If the version of pip available is older than 8.0.3, the packages
        wheel, setuptools, and distribute will not be reported by this function
        even if they are installed. Unlike :py:func:`pip.freeze
        <salt.modules.pip.freeze>`, this function always reports the version of
        pip which is installed.
    Filter list of installed modules and return True if ``pkgname`` exists in
    the list of packages installed.

    CLI Example:

@@ -1443,30 +1508,11 @@ def is_installed(pkgname=None, bin_env=None, user=None, cwd=None):
    """

    cwd = _pip_bin_env(cwd, bin_env)
    for line in freeze(bin_env=bin_env, user=user, cwd=cwd):
        if line.startswith("-f") or line.startswith("#"):
            # ignore -f line as it contains --find-links directory
            # ignore comment lines
            continue
        elif line.startswith("-e hg+not trust"):
            # ignore hg + not trust problem
            continue
        elif line.startswith("-e"):
            line = line.split("-e ")[1]
            version_, name = line.split("#egg=")
        elif len(line.split("===")) >= 2:
            name = line.split("===")[0]
            version_ = line.split("===")[1]
        elif len(line.split("==")) >= 2:
            name = line.split("==")[0]
            version_ = line.split("==")[1]
        else:
            logger.error("Can't parse line '%s'", line)
            continue
    pkgs = list_(prefix=pkgname, bin_env=bin_env, user=user, cwd=cwd)

        if pkgname:
            if pkgname == name.lower():
                return True
    for pkg in pkgs:
        if pkg.lower() == pkgname.lower():
            return True

    return False
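Editor's note: the exact-match loop is still needed after passing ``prefix``, because ``list_`` does a startswith filter: a prefix query for "pip" would also return "pip-tools". A toy illustration of the two-step check (hypothetical package set):

    installed = {"pip": "22.3.1", "pip-tools": "6.12.1"}

    def is_installed(pkgname):
        # Prefix filter first (what list_(prefix=...) returns) ...
        candidates = [n for n in installed if n.lower().startswith(pkgname.lower())]
        # ... then the exact, case-insensitive match, as in the loop above.
        return any(n.lower() == pkgname.lower() for n in candidates)

    print(is_installed("pip"))        # True
    print(is_installed("pip-audit"))  # False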
@@ -1,7 +1,15 @@
"""
Proxy Minion interface module for managing VMware ESXi hosts.

.. versionadded:: 2015.8.4
.. Warning::
    This module will be deprecated in a future release of Salt. VMware strongly
    recommends using the
    `VMware Salt extensions <https://docs.saltproject.io/salt/extensions/salt-ext-modules-vmware/en/latest/all.html>`_
    instead of the ESXi module. Because the Salt extensions are newer and
    actively supported by VMware, they are more compatible with current versions
    of ESXi and they work well with the latest features in the VMware product
    line.


**Special Note: SaltStack thanks** `Adobe Corporation <http://adobe.com/>`_
**for their support in creating this Proxy Minion integration.**
@@ -1,7 +1,15 @@
"""
Manage VMware ESXi Hosts.

.. versionadded:: 2015.8.4
.. Warning::
    This module will be deprecated in a future release of Salt. VMware strongly
    recommends using the
    `VMware Salt extensions <https://docs.saltproject.io/salt/extensions/salt-ext-modules-vmware/en/latest/all.html>`_
    instead of the ESXi module. Because the Salt extensions are newer and
    actively supported by VMware, they are more compatible with current versions
    of ESXi and they work well with the latest features in the VMware product
    line.


Dependencies
============
@@ -845,9 +845,11 @@ def installed(
    # No requirements case.
    # Check pre-existence of the requested packages.
    else:
        # Attempt to pre-cache a the current pip list
        # Attempt to pre-cache the current pip list
        try:
            pip_list = __salt__["pip.list"](bin_env=bin_env, user=user, cwd=cwd)
            pip_list = __salt__["pip.list"](
                bin_env=bin_env, user=user, cwd=cwd, env_vars=env_vars
            )
        # If we fail, then just send False, and we'll try again in the next function call
        except Exception as exc:  # pylint: disable=broad-except
            log.exception(exc)
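Editor's note: the regression fixed here (changelog/60557) bites whenever pip's view of the world depends on the environment. A rough sketch of the mechanism (plain subprocess, hypothetical env var and helper name, not Salt code):

    import os
    import subprocess
    import sys

    def pip_list_with_env(env_vars=None):
        """Run `pip list` with extra environment variables, as pip.list now does."""
        env = dict(os.environ, **(env_vars or {}))
        return subprocess.run(
            [sys.executable, "-m", "pip", "list", "--format=json"],
            env=env, capture_output=True, text=True, check=True,
        ).stdout

    # Without forwarding env_vars, the pre-cache listing could consult a
    # different index/proxy than the install itself, e.g.:
    out = pip_list_with_env({"PIP_INDEX_URL": "https://mirror.example.com/simple"})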
@@ -28,7 +28,6 @@ pairs.
# When production windows installer is using Python 3, Python 2 code can be removed

import logging
import sys

import salt.utils.platform
import salt.utils.stringutils

@@ -44,7 +43,6 @@ except ImportError:
    HAS_WINDOWS_MODULES = False


PY2 = sys.version_info[0] == 2
log = logging.getLogger(__name__)

# Define the module's virtual name

@@ -198,6 +196,9 @@ def key_exists(hive, key, use_32bit_registry=False):
    except win32api.error as exc:
        if exc.winerror == 2:
            return False
        if exc.winerror == 5:
            # It exists, but we don't have permission to read it
            return True
        raise
    finally:
        if handle:
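Editor's note: the winerror codes here are Win32's ERROR_FILE_NOT_FOUND (2) and ERROR_ACCESS_DENIED (5); access denied can only be raised for a key that actually exists, which is what lets the minion start without read access to root_dir (changelog/61789). A minimal standalone illustration using the standard-library winreg module (an assumption for demonstration only; the Salt util uses win32api, and this snippet only runs on Windows):

    import winreg

    def key_exists(key, hive=winreg.HKEY_LOCAL_MACHINE):
        """Treat ERROR_ACCESS_DENIED (5) as proof the key exists."""
        try:
            winreg.CloseKey(winreg.OpenKey(hive, key))
            return True
        except FileNotFoundError:   # winerror 2: really not there
            return False
        except PermissionError:     # winerror 5: it exists, just unreadable
            return True

    print(key_exists(r"SOFTWARE\Microsoft"))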
@@ -264,7 +265,8 @@ def value_exists(hive, key, vname, use_32bit_registry=False):
        # value/data pair not found
        return False
    finally:
        win32api.RegCloseKey(handle)
        if handle:
            win32api.RegCloseKey(handle)


def broadcast_change():

@@ -348,20 +350,20 @@ def list_keys(hive, key=None, use_32bit_registry=False):

        for i in range(win32api.RegQueryInfoKey(handle)[0]):
            subkey = win32api.RegEnumKey(handle, i)
            if PY2:
                subkeys.append(_to_mbcs(subkey))
            else:
                subkeys.append(subkey)
            subkeys.append(subkey)

    except win32api.error as exc:
        if exc.winerror == 2:
            log.debug(r"Cannot find key: %s\%s", hive, key, exc_info=True)
            return False, r"Cannot find key: {}\{}".format(hive, key)
        if exc.winerror == 5:
            log.debug(r"Access is denied: %s\%s", hive, key, exc_info=True)
            return False, r"Access is denied: {}\{}".format(hive, key)
        raise

    finally:
        if handle:
            handle.Close()
            win32api.RegCloseKey(handle)

    return subkeys

@@ -444,11 +446,14 @@ def list_values(hive, key=None, use_32bit_registry=False):
        if exc.winerror == 2:
            log.debug(r"Cannot find key: %s\%s", hive, key)
            return False, r"Cannot find key: {}\{}".format(hive, key)
        elif exc.winerror == 5:
            log.debug(r"Access is denied: %s\%s", hive, key)
            return False, r"Access is denied: {}\{}".format(hive, key)
        raise

    finally:
        if handle:
            handle.Close()
            win32api.RegCloseKey(handle)
    return values

@@ -535,6 +540,7 @@ def read_value(hive, key, vname=None, use_32bit_registry=False):
        raise CommandExecutionError("Invalid Hive: {}".format(local_hive))
    access_mask = registry.registry_32[use_32bit_registry]

    handle = None
    try:
        handle = win32api.RegOpenKeyEx(hkey, local_key, 0, access_mask)
        try:

@@ -572,8 +578,18 @@ def read_value(hive, key, vname=None, use_32bit_registry=False):
            log.trace(msg)
            ret["comment"] = msg
            ret["success"] = False
        elif exc.winerror == 5:
            msg = "Access is denied: {}\\{}".format(local_hive, local_key)
            log.trace(exc)
            log.trace(msg)
            ret["comment"] = msg
            ret["success"] = False
        else:
            raise
    finally:
        if handle:
            win32api.RegCloseKey(handle)

    return ret


@@ -617,7 +633,7 @@ def set_value(
        The type of data this parameter expects is determined by the value
        type specified in ``vtype``. The correspondence is as follows:

        - REG_BINARY: Binary data (str in Py2, bytes in Py3)
        - REG_BINARY: Binary data (bytes)
        - REG_DWORD: int
        - REG_EXPAND_SZ: str
        - REG_MULTI_SZ: list of str
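Editor's note: the vdata/vtype pairing is easiest to see in use; the call shape below matches the destructive test kept at the end of this commit (the scratch key name is illustrative, and the call needs Windows, pywin32, and rights to write under HKLM):

    import salt.utils.win_reg as win_reg

    # REG_MULTI_SZ expects a list of str; an empty list is valid and
    # round-trips back as [] rather than None (see the test further down).
    win_reg.set_value(
        hive="HKLM",
        key="SOFTWARE\\SaltTesting-Scratch",  # illustrative scratch key
        vname="empty_list",
        vdata=[],
        vtype="REG_MULTI_SZ",
    )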
@@ -751,10 +767,11 @@ def set_value(

    except win32api.error as exc:
        log.exception(
            "Error creating/opening key: %s\\%s\n%s",
            "Error creating/opening key: %s\\%s\n%s\n%s",
            local_hive,
            local_key,
            exc.winerror,
            exc.strerror,
        )
        return False

@@ -929,7 +946,7 @@ def delete_key_recursive(hive, key, use_32bit_registry=False):
            ret["Failed"].append(r"{}\{} {}".format(hive, sub_key_path, exc.strerror))
        finally:
            if key_handle:
                win32api.CloseHandle(key_handle)
                win32api.RegCloseKey(key_handle)

    broadcast_change()
|
@ -1056,3 +1056,59 @@ def test_runas_env_sudo_group(bundled):
|
|||
popen_mock.call_args_list[0][0][0]
|
||||
== exp_ret
|
||||
)
|
||||
|
||||
|
||||
def test_prep_powershell_cmd():
|
||||
"""
|
||||
Tests _prep_powershell_cmd returns correct cmd
|
||||
"""
|
||||
with patch("salt.utils.platform.is_windows", MagicMock(return_value=False)):
|
||||
stack = [["", "", ""], ["", "", ""], ["", "", ""]]
|
||||
ret = cmdmod._prep_powershell_cmd(
|
||||
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
|
||||
)
|
||||
assert ret == 'powershell -NonInteractive -NoProfile -Command "$PSVersionTable"'
|
||||
|
||||
ret = cmdmod._prep_powershell_cmd(
|
||||
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=True
|
||||
)
|
||||
assert (
|
||||
ret
|
||||
== "powershell -NonInteractive -NoProfile -EncodedCommand $PSVersionTable"
|
||||
)
|
||||
|
||||
stack = [["", "", ""], ["", "", "script"], ["", "", ""]]
|
||||
ret = cmdmod._prep_powershell_cmd(
|
||||
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
|
||||
)
|
||||
assert (
|
||||
ret
|
||||
== "powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command $PSVersionTable"
|
||||
)
|
||||
|
||||
with patch("salt.utils.platform.is_windows", MagicMock(return_value=True)):
|
||||
stack = [["", "", ""], ["", "", ""], ["", "", ""]]
|
||||
|
||||
ret = cmdmod._prep_powershell_cmd(
|
||||
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
|
||||
)
|
||||
assert (
|
||||
ret == '"powershell" -NonInteractive -NoProfile -Command "$PSVersionTable"'
|
||||
)
|
||||
|
||||
ret = cmdmod._prep_powershell_cmd(
|
||||
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=True
|
||||
)
|
||||
assert (
|
||||
ret
|
||||
== '"powershell" -NonInteractive -NoProfile -EncodedCommand $PSVersionTable'
|
||||
)
|
||||
|
||||
stack = [["", "", ""], ["", "", "script"], ["", "", ""]]
|
||||
ret = cmdmod._prep_powershell_cmd(
|
||||
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
|
||||
)
|
||||
assert (
|
||||
ret
|
||||
== '"powershell" -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command $PSVersionTable'
|
||||
)
|
||||
|
|
|
@@ -1,5 +1,6 @@
import os
import sys
from textwrap import dedent

import pytest

@@ -1422,7 +1423,7 @@ def test_freeze_command_with_all():
    )


def test_list_command():
def test_list_freeze_parse_command():
    eggs = [
        "M2Crypto==0.21.1",
        "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev",

@@ -1434,7 +1435,7 @@ def test_list_command():
    mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)})
    with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
        with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)):
            ret = pip.list_()
            ret = pip.list_freeze_parse()
            expected = [sys.executable, "-m", "pip", "freeze"]
            mock.assert_called_with(
                expected,

@@ -1458,11 +1459,11 @@ def test_list_command():
    with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")):
        pytest.raises(
            CommandExecutionError,
            pip.list_,
            pip.list_freeze_parse,
        )


def test_list_command_with_all():
def test_list_freeze_parse_command_with_all():
    eggs = [
        "M2Crypto==0.21.1",
        "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev",

@@ -1479,7 +1480,7 @@ def test_list_command_with_all():
    mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)})
    with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
        with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)):
            ret = pip.list_()
            ret = pip.list_freeze_parse()
            expected = [sys.executable, "-m", "pip", "freeze", "--all"]
            mock.assert_called_with(
                expected,

@@ -1504,11 +1505,11 @@ def test_list_command_with_all():
    with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")):
        pytest.raises(
            CommandExecutionError,
            pip.list_,
            pip.list_freeze_parse,
        )


def test_list_command_with_prefix():
def test_list_freeze_parse_command_with_prefix():
    eggs = [
        "M2Crypto==0.21.1",
        "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev",

@@ -1519,7 +1520,7 @@ def test_list_command_with_prefix():
    mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)})
    with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
        with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")):
            ret = pip.list_(prefix="bb")
            ret = pip.list_freeze_parse(prefix="bb")
            expected = [sys.executable, "-m", "pip", "freeze"]
            mock.assert_called_with(
                expected,

@@ -1680,7 +1681,7 @@ def test_resolve_requirements_chain_function():
def test_when_upgrade_is_called_and_there_are_available_upgrades_it_should_call_correct_command(
    expected_user,
):
    fake_run_all = MagicMock(return_value={"retcode": 0, "stdout": ""})
    fake_run_all = MagicMock(return_value={"retcode": 0, "stdout": "{}"})
    pip_user = expected_user
    with patch.dict(pip.__salt__, {"cmd.run_all": fake_run_all}), patch(
        "salt.modules.pip.list_upgrades", autospec=True, return_value=[pip_user]

@@ -1692,7 +1693,7 @@ def test_when_upgrade_is_called_and_there_are_available_upgrades_it_should_call_
        pip.upgrade(user=pip_user)

        fake_run_all.assert_any_call(
            ["some-other-pip", "install", "-U", "freeze", "--all", pip_user],
            ["some-other-pip", "install", "-U", "list", "--format=json", pip_user],
            runas=pip_user,
            cwd=None,
            use_vt=False,

@@ -1805,3 +1806,76 @@ def test_install_target_from_VENV_PIP_TARGET_in_resulting_command():
        use_vt=False,
        python_shell=False,
    )


def test_list():
    json_out = dedent(
        """
        [
          {
            "name": "idemenv",
            "version": "0.2.0",
            "editable_project_location": "/home/debian/idemenv"
          },
          {
            "name": "MarkupSafe",
            "version": "2.1.1"
          },
          {
            "name": "pip",
            "version": "22.3.1"
          },
          {
            "name": "pop",
            "version": "23.0.0"
          },
          {
            "name": "salt",
            "version": "3006.0+0na.5b18e86"
          },
          {
            "name": "typing_extensions",
            "version": "4.4.0"
          },
          {
            "name": "unattended-upgrades",
            "version": "0.1"
          },
          {
            "name": "yarl",
            "version": "1.8.2"
          }
        ]
        """
    )
    mock_version = "22.3.1"
    mock = MagicMock(return_value={"retcode": 0, "stdout": json_out})
    with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
        with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)):
            ret = pip.list_()
            expected = [sys.executable, "-m", "pip", "list", "--format=json"]
            mock.assert_called_with(
                expected,
                cwd=None,
                runas=None,
                python_shell=False,
            )
            assert ret == {
                "MarkupSafe": "2.1.1",
                "idemenv": "0.2.0",
                "pip": "22.3.1",
                "pop": "23.0.0",
                "salt": "3006.0+0na.5b18e86",
                "typing_extensions": "4.4.0",
                "unattended-upgrades": "0.1",
                "yarl": "1.8.2",
            }

    # Non zero returncode raises exception?
    mock = MagicMock(return_value={"retcode": 1, "stderr": "CABOOOOMMM!"})
    with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
        with patch("salt.modules.pip.version", MagicMock(return_value="22.3.1")):
            pytest.raises(
                CommandExecutionError,
                pip.list_,
            )
1032 tests/pytests/unit/utils/test_win_reg.py Normal file
File diff suppressed because it is too large.
@@ -1,981 +0,0 @@
import pytest
from saltfactories.utils import random_string

import salt.utils.stringutils
import salt.utils.win_reg as win_reg
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase

try:
    import win32api

    HAS_WIN32 = True
except ImportError:
    HAS_WIN32 = False

UNICODE_KEY = "Unicode Key \N{TRADE MARK SIGN}"
UNICODE_VALUE = (
    "Unicode Value \N{COPYRIGHT SIGN},\N{TRADE MARK SIGN},\N{REGISTERED SIGN}"
)
FAKE_KEY = "SOFTWARE\\{}".format(random_string("SaltTesting-", lowercase=False))


@pytest.mark.skipif(not HAS_WIN32, reason="Tests require win32 libraries")
class WinFunctionsTestCase(TestCase):
    """
    Test cases for salt.utils.win_reg
    """

    def test_broadcast_change_success(self):
        """
        Tests the broadcast_change function
        """
        with patch("win32gui.SendMessageTimeout", return_value=("", 0)):
            self.assertTrue(win_reg.broadcast_change())

    def test_broadcast_change_fail(self):
        """
        Tests the broadcast_change function failure
        """
        with patch("win32gui.SendMessageTimeout", return_value=("", 1)):
            self.assertFalse(win_reg.broadcast_change())

    def test_key_exists_existing(self):
        """
        Tests the key_exists function using a well known registry key
        """
        self.assertTrue(win_reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft"))

    def test_key_exists_non_existing(self):
        """
        Tests the key_exists function using a non existing registry key
        """
        self.assertFalse(win_reg.key_exists(hive="HKLM", key=FAKE_KEY))

    def test_key_exists_invalid_hive(self):
        """
        Tests the key_exists function using an invalid hive
        """
        self.assertRaises(
            CommandExecutionError,
            win_reg.key_exists,
            hive="BADHIVE",
            key="SOFTWARE\\Microsoft",
        )

    def test_key_exists_unknown_key_error(self):
        """
        Tests the key_exists function with an unknown key error
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
            self.assertRaises(
                win32api.error,
                win_reg.key_exists,
                hive="HKLM",
                key="SOFTWARE\\Microsoft",
            )

    def test_value_exists_existing(self):
        """
        Tests the value_exists function using a well known registry key
        """
        self.assertTrue(
            win_reg.value_exists(
                hive="HKLM",
                key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                vname="CommonFilesDir",
            )
        )

    def test_value_exists_non_existing(self):
        """
        Tests the value_exists function using a non existing registry key
        """
        self.assertFalse(
            win_reg.value_exists(
                hive="HKLM",
                key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                vname="NonExistingValueName",
            )
        )

    def test_value_exists_invalid_hive(self):
        """
        Tests the value_exists function using an invalid hive
        """
        self.assertRaises(
            CommandExecutionError,
            win_reg.value_exists,
            hive="BADHIVE",
            key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
            vname="CommonFilesDir",
        )

    def test_value_exists_key_not_exist(self):
        """
        Tests the value_exists function when the key does not exist
        """
        mock_error = MagicMock(
            side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
            self.assertFalse(
                win_reg.value_exists(
                    hive="HKLM",
                    key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                    vname="CommonFilesDir",
                )
            )

    def test_value_exists_unknown_key_error(self):
        """
        Tests the value_exists function with an unknown error when opening the
        key
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
            self.assertRaises(
                win32api.error,
                win_reg.value_exists,
                hive="HKLM",
                key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                vname="CommonFilesDir",
            )

    def test_value_exists_empty_default_value(self):
        """
        Tests the value_exists function when querying the default value
        """
        mock_error = MagicMock(
            side_effect=win32api.error(2, "RegQueryValueEx", "Empty Value")
        )
        with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error):
            self.assertTrue(
                win_reg.value_exists(
                    hive="HKLM",
                    key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                    vname=None,
                )
            )

    def test_value_exists_no_vname(self):
        """
        Tests the value_exists function when the vname does not exist
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegQueryValueEx", "Empty Value")
        )
        with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error):
            self.assertFalse(
                win_reg.value_exists(
                    hive="HKLM",
                    key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                    vname="NonExistingValuePair",
                )
            )

    def test_list_keys_existing(self):
        """
        Test the list_keys function using a well known registry key
        """
        self.assertIn("Microsoft", win_reg.list_keys(hive="HKLM", key="SOFTWARE"))

    def test_list_keys_non_existing(self):
        """
        Test the list_keys function using a non existing registry key
        """
        expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY))
        self.assertEqual(win_reg.list_keys(hive="HKLM", key=FAKE_KEY), expected)

    def test_list_keys_invalid_hive(self):
        """
        Test the list_keys function when passing an invalid hive
        """
        self.assertRaises(
            CommandExecutionError,
            win_reg.list_keys,
            hive="BADHIVE",
            key="SOFTWARE\\Microsoft",
        )

    def test_list_keys_unknown_key_error(self):
        """
        Tests the list_keys function with an unknown key error
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
            self.assertRaises(
                win32api.error,
                win_reg.list_keys,
                hive="HKLM",
                key="SOFTWARE\\Microsoft",
            )

    def test_list_values_existing(self):
        """
        Test the list_values function using a well known registry key
        """
        values = win_reg.list_values(
            hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion"
        )
        keys = []
        for value in values:
            keys.append(value["vname"])
        self.assertIn("ProgramFilesDir", keys)

    def test_list_values_non_existing(self):
        """
        Test the list_values function using a non existing registry key
        """
        expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY))
        self.assertEqual(win_reg.list_values(hive="HKLM", key=FAKE_KEY), expected)

    def test_list_values_invalid_hive(self):
        """
        Test the list_values function when passing an invalid hive
        """
        self.assertRaises(
            CommandExecutionError,
            win_reg.list_values,
            hive="BADHIVE",
            key="SOFTWARE\\Microsoft",
        )

    def test_list_values_unknown_key_error(self):
        """
        Tests the list_values function with an unknown key error
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
            self.assertRaises(
                win32api.error,
                win_reg.list_values,
                hive="HKLM",
                key="SOFTWARE\\Microsoft",
            )

    def test_read_value_existing(self):
        """
        Test the read_value function using a well known registry value
        """
        ret = win_reg.read_value(
            hive="HKLM",
            key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
            vname="ProgramFilesPath",
        )
        self.assertEqual(ret["vdata"], "%ProgramFiles%")

    def test_read_value_default(self):
        """
        Test the read_value function reading the default value using a well
        known registry key
        """
        ret = win_reg.read_value(
            hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion"
        )
        self.assertEqual(ret["vdata"], "(value not set)")

    def test_read_value_non_existing(self):
        """
        Test the read_value function using a non existing value pair
        """
        expected = {
            "comment": (
                "Cannot find fake_name in HKLM\\SOFTWARE\\Microsoft\\"
                "Windows\\CurrentVersion"
            ),
            "vdata": None,
            "vname": "fake_name",
            "success": False,
            "hive": "HKLM",
            "key": "SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
        }
        self.assertDictEqual(
            win_reg.read_value(
                hive="HKLM",
                key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                vname="fake_name",
            ),
            expected,
        )

    def test_read_value_non_existing_key(self):
        """
        Test the read_value function using a non existing registry key
        """
        expected = {
            "comment": "Cannot find key: HKLM\\{}".format(FAKE_KEY),
            "vdata": None,
            "vname": "fake_name",
            "success": False,
            "hive": "HKLM",
            "key": FAKE_KEY,
        }
        self.assertDictEqual(
            win_reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name"), expected
        )

    def test_read_value_invalid_hive(self):
        """
        Test the read_value function when passing an invalid hive
        """
        self.assertRaises(
            CommandExecutionError,
            win_reg.read_value,
            hive="BADHIVE",
            key="SOFTWARE\\Microsoft",
            vname="ProgramFilesPath",
        )

    def test_read_value_unknown_key_error(self):
        """
        Tests the read_value function with an unknown key error
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
            self.assertRaises(
                win32api.error,
                win_reg.read_value,
                hive="HKLM",
                key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                vname="ProgramFilesPath",
            )

    def test_read_value_unknown_value_error(self):
        """
        Tests the read_value function with an unknown value error
        """
        mock_error = MagicMock(
            side_effect=win32api.error(123, "RegQueryValueEx", "Unknown error")
        )
        with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error):
            self.assertRaises(
                win32api.error,
                win_reg.read_value,
                hive="HKLM",
                key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion",
                vname="ProgramFilesPath",
            )

    @pytest.mark.destructive_test
    def test_read_value_multi_sz_empty_list(self):
        """
        An empty REG_MULTI_SZ value should return an empty list, not None
        """
        try:
            self.assertTrue(
                win_reg.set_value(
                    hive="HKLM",
                    key=FAKE_KEY,
                    vname="empty_list",
                    vdata=[],
                    vtype="REG_MULTI_SZ",
                )
            )
            expected = {
                "hive": "HKLM",
                "key": FAKE_KEY,
                "success": True,
                "vdata": [],
                "vname": "empty_list",
                "vtype": "REG_MULTI_SZ",
            }
            self.assertEqual(
|
||||
win_reg.read_value(
|
||||
hive="HKLM",
|
||||
key=FAKE_KEY,
|
||||
vname="empty_list",
|
||||
),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_set_value(self):
|
||||
"""
|
||||
Test the set_value function
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data"
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"hive": "HKLM",
|
||||
"key": FAKE_KEY,
|
||||
"success": True,
|
||||
"vdata": "fake_data",
|
||||
"vname": "fake_name",
|
||||
"vtype": "REG_SZ",
|
||||
}
|
||||
self.assertEqual(
|
||||
win_reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name"),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_set_value_default(self):
|
||||
"""
|
||||
Test the set_value function on the default value
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(hive="HKLM", key=FAKE_KEY, vdata="fake_default_data")
|
||||
)
|
||||
expected = {
|
||||
"hive": "HKLM",
|
||||
"key": FAKE_KEY,
|
||||
"success": True,
|
||||
"vdata": "fake_default_data",
|
||||
"vname": "(Default)",
|
||||
"vtype": "REG_SZ",
|
||||
}
|
||||
self.assertEqual(win_reg.read_value(hive="HKLM", key=FAKE_KEY), expected)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_set_value_unicode_key(self):
|
||||
"""
|
||||
Test the set_value function on a unicode key
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM",
|
||||
key="\\".join([FAKE_KEY, UNICODE_KEY]),
|
||||
vname="fake_name",
|
||||
vdata="fake_value",
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"hive": "HKLM",
|
||||
"key": "\\".join([FAKE_KEY, UNICODE_KEY]),
|
||||
"success": True,
|
||||
"vdata": "fake_value",
|
||||
"vname": "fake_name",
|
||||
"vtype": "REG_SZ",
|
||||
}
|
||||
self.assertEqual(
|
||||
win_reg.read_value(
|
||||
hive="HKLM",
|
||||
key="\\".join([FAKE_KEY, UNICODE_KEY]),
|
||||
vname="fake_name",
|
||||
),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_set_value_unicode_value(self):
|
||||
"""
|
||||
Test the set_value function on a unicode value
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"hive": "HKLM",
|
||||
"key": FAKE_KEY,
|
||||
"success": True,
|
||||
"vdata": UNICODE_VALUE,
|
||||
"vname": "fake_unicode",
|
||||
"vtype": "REG_SZ",
|
||||
}
|
||||
self.assertEqual(
|
||||
win_reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode"),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_set_value_reg_dword(self):
|
||||
"""
|
||||
Test the set_value function on a REG_DWORD value
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM",
|
||||
key=FAKE_KEY,
|
||||
vname="dword_value",
|
||||
vdata=123,
|
||||
vtype="REG_DWORD",
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"hive": "HKLM",
|
||||
"key": FAKE_KEY,
|
||||
"success": True,
|
||||
"vdata": 123,
|
||||
"vname": "dword_value",
|
||||
"vtype": "REG_DWORD",
|
||||
}
|
||||
self.assertEqual(
|
||||
win_reg.read_value(hive="HKLM", key=FAKE_KEY, vname="dword_value"),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_set_value_reg_qword(self):
|
||||
"""
|
||||
Test the set_value function on a REG_QWORD value
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM",
|
||||
key=FAKE_KEY,
|
||||
vname="qword_value",
|
||||
vdata=123,
|
||||
vtype="REG_QWORD",
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"hive": "HKLM",
|
||||
"key": FAKE_KEY,
|
||||
"success": True,
|
||||
"vdata": 123,
|
||||
"vname": "qword_value",
|
||||
"vtype": "REG_QWORD",
|
||||
}
|
||||
self.assertEqual(
|
||||
win_reg.read_value(hive="HKLM", key=FAKE_KEY, vname="qword_value"),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
def test_set_value_invalid_hive(self):
|
||||
"""
|
||||
Test the set_value function when passing an invalid hive
|
||||
"""
|
||||
self.assertRaises(
|
||||
CommandExecutionError,
|
||||
win_reg.set_value,
|
||||
hive="BADHIVE",
|
||||
key=FAKE_KEY,
|
||||
vname="fake_name",
|
||||
vdata="fake_data",
|
||||
)
|
||||
|
||||
def test_set_value_open_create_failure(self):
|
||||
"""
|
||||
Test the set_value function when there is a problem opening/creating
|
||||
the key
|
||||
"""
|
||||
mock_error = MagicMock(
|
||||
side_effect=win32api.error(123, "RegCreateKeyEx", "Unknown error")
|
||||
)
|
||||
with patch("salt.utils.win_reg.win32api.RegCreateKeyEx", mock_error):
|
||||
self.assertFalse(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data"
|
||||
)
|
||||
)
|
||||
|
||||
def test_set_value_type_error(self):
|
||||
"""
|
||||
Test the set_value function when the wrong type of data is passed
|
||||
"""
|
||||
mock_error = MagicMock(side_effect=TypeError("Mocked TypeError"))
|
||||
with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error):
|
||||
self.assertFalse(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data"
|
||||
)
|
||||
)
|
||||
|
||||
def test_set_value_system_error(self):
|
||||
"""
|
||||
Test the set_value function when a SystemError occurs while setting the
|
||||
value
|
||||
"""
|
||||
mock_error = MagicMock(side_effect=SystemError("Mocked SystemError"))
|
||||
with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error):
|
||||
self.assertFalse(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data"
|
||||
)
|
||||
)
|
||||
|
||||
def test_set_value_value_error(self):
|
||||
"""
|
||||
Test the set_value function when a ValueError occurs while setting the
|
||||
value
|
||||
"""
|
||||
mock_error = MagicMock(side_effect=ValueError("Mocked ValueError"))
|
||||
with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error):
|
||||
self.assertFalse(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data"
|
||||
)
|
||||
)
|
||||
|
||||
def test_cast_vdata_reg_binary(self):
|
||||
"""
|
||||
Test the cast_vdata function with REG_BINARY
|
||||
Should always return binary data
|
||||
"""
|
||||
vdata = salt.utils.stringutils.to_bytes("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_BINARY")
|
||||
self.assertTrue(isinstance(result, bytes))
|
||||
|
||||
vdata = salt.utils.stringutils.to_str("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_BINARY")
|
||||
self.assertTrue(isinstance(result, bytes))
|
||||
|
||||
vdata = salt.utils.stringutils.to_unicode("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_BINARY")
|
||||
self.assertTrue(isinstance(result, bytes))
|
||||
|
||||
def test_cast_vdata_reg_dword(self):
|
||||
"""
|
||||
Test the cast_vdata function with REG_DWORD
|
||||
Should always return integer
|
||||
"""
|
||||
vdata = 1
|
||||
expected = 1
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_DWORD")
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
vdata = "1"
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_DWORD")
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
vdata = "0000001"
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_DWORD")
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
def test_cast_vdata_reg_expand_sz(self):
|
||||
"""
|
||||
Test the cast_vdata function with REG_EXPAND_SZ
|
||||
Should always return unicode
|
||||
"""
|
||||
vdata = salt.utils.stringutils.to_str("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_EXPAND_SZ")
|
||||
self.assertTrue(isinstance(result, str))
|
||||
|
||||
vdata = salt.utils.stringutils.to_bytes("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_EXPAND_SZ")
|
||||
self.assertTrue(isinstance(result, str))
|
||||
|
||||
def test_cast_vdata_reg_multi_sz(self):
|
||||
"""
|
||||
Test the cast_vdata function with REG_MULTI_SZ
|
||||
Should always return a list of unicode strings
|
||||
"""
|
||||
vdata = [
|
||||
salt.utils.stringutils.to_str("test string"),
|
||||
salt.utils.stringutils.to_bytes("test bytes"),
|
||||
]
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_MULTI_SZ")
|
||||
self.assertTrue(isinstance(result, list))
|
||||
for item in result:
|
||||
self.assertTrue(isinstance(item, str))
|
||||
|
||||
def test_cast_vdata_reg_qword(self):
|
||||
"""
|
||||
Test the cast_vdata function with REG_QWORD
|
||||
Should always return a long integer
|
||||
`int` is `long` is default on Py3
|
||||
"""
|
||||
vdata = 1
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_QWORD")
|
||||
self.assertTrue(isinstance(result, int))
|
||||
|
||||
vdata = "1"
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_QWORD")
|
||||
self.assertTrue(isinstance(result, int))
|
||||
|
||||
def test_cast_vdata_reg_sz(self):
|
||||
"""
|
||||
Test the cast_vdata function with REG_SZ
|
||||
Should always return unicode
|
||||
"""
|
||||
vdata = salt.utils.stringutils.to_str("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_SZ")
|
||||
self.assertTrue(isinstance(result, str))
|
||||
|
||||
vdata = salt.utils.stringutils.to_bytes("test data")
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_SZ")
|
||||
self.assertTrue(isinstance(result, str))
|
||||
|
||||
vdata = None
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype="REG_SZ")
|
||||
self.assertTrue(isinstance(result, str))
|
||||
self.assertEqual(result, "")
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_value(self):
|
||||
"""
|
||||
Test the delete_value function
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data"
|
||||
)
|
||||
)
|
||||
self.assertTrue(
|
||||
win_reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name")
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
def test_delete_value_non_existing(self):
|
||||
"""
|
||||
Test the delete_value function on non existing value
|
||||
"""
|
||||
mock_error = MagicMock(
|
||||
side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error")
|
||||
)
|
||||
with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
|
||||
self.assertIsNone(
|
||||
win_reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name")
|
||||
)
|
||||
|
||||
def test_delete_value_invalid_hive(self):
|
||||
"""
|
||||
Test the delete_value function when passing an invalid hive
|
||||
"""
|
||||
self.assertRaises(
|
||||
CommandExecutionError,
|
||||
win_reg.delete_value,
|
||||
hive="BADHIVE",
|
||||
key=FAKE_KEY,
|
||||
vname="fake_name",
|
||||
)
|
||||
|
||||
def test_delete_value_unknown_error(self):
|
||||
"""
|
||||
Test the delete_value function when there is a problem opening the key
|
||||
"""
|
||||
mock_error = MagicMock(
|
||||
side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error")
|
||||
)
|
||||
with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error):
|
||||
self.assertRaises(
|
||||
win32api.error,
|
||||
win_reg.delete_value,
|
||||
hive="HKLM",
|
||||
key=FAKE_KEY,
|
||||
vname="fake_name",
|
||||
)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_value_unicode(self):
|
||||
"""
|
||||
Test the delete_value function on a unicode value
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE
|
||||
)
|
||||
)
|
||||
self.assertTrue(
|
||||
win_reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode")
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_value_unicode_vname(self):
|
||||
"""
|
||||
Test the delete_value function on a unicode vname
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY, vdata="junk data"
|
||||
)
|
||||
)
|
||||
self.assertTrue(
|
||||
win_reg.delete_value(hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY)
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_value_unicode_key(self):
|
||||
"""
|
||||
Test the delete_value function on a unicode key
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM",
|
||||
key="\\".join([FAKE_KEY, UNICODE_KEY]),
|
||||
vname="fake_name",
|
||||
vdata="junk data",
|
||||
)
|
||||
)
|
||||
self.assertTrue(
|
||||
win_reg.delete_value(
|
||||
hive="HKLM",
|
||||
key="\\".join([FAKE_KEY, UNICODE_KEY]),
|
||||
vname="fake_name",
|
||||
)
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
def test_delete_key_recursive_invalid_hive(self):
|
||||
"""
|
||||
Test the delete_key_recursive function when passing an invalid hive
|
||||
"""
|
||||
self.assertRaises(
|
||||
CommandExecutionError,
|
||||
win_reg.delete_key_recursive,
|
||||
hive="BADHIVE",
|
||||
key=FAKE_KEY,
|
||||
)
|
||||
|
||||
def test_delete_key_recursive_key_not_found(self):
|
||||
"""
|
||||
Test the delete_key_recursive function when the passed key to delete is
|
||||
not found.
|
||||
"""
|
||||
self.assertFalse(win_reg.key_exists(hive="HKLM", key=FAKE_KEY))
|
||||
self.assertFalse(win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY))
|
||||
|
||||
def test_delete_key_recursive_too_close(self):
|
||||
"""
|
||||
Test the delete_key_recursive function when the passed key to delete is
|
||||
too close to root, such as
|
||||
"""
|
||||
mock_true = MagicMock(return_value=True)
|
||||
with patch("salt.utils.win_reg.key_exists", mock_true):
|
||||
self.assertFalse(win_reg.delete_key_recursive(hive="HKLM", key="FAKE_KEY"))
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_key_recursive(self):
|
||||
"""
|
||||
Test the delete_key_recursive function
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value"
|
||||
)
|
||||
)
|
||||
expected = {"Deleted": ["\\".join(["HKLM", FAKE_KEY])], "Failed": []}
|
||||
self.assertDictEqual(
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_key_recursive_failed_to_open_key(self):
|
||||
"""
|
||||
Test the delete_key_recursive function on failure to open the key
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value"
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"Deleted": [],
|
||||
"Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Failed to connect to key"],
|
||||
}
|
||||
mock_true = MagicMock(return_value=True)
|
||||
mock_error = MagicMock(
|
||||
side_effect=[
|
||||
1,
|
||||
win32api.error(3, "RegOpenKeyEx", "Failed to connect to key"),
|
||||
]
|
||||
)
|
||||
with patch("salt.utils.win_reg.key_exists", mock_true), patch(
|
||||
"salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error
|
||||
):
|
||||
self.assertDictEqual(
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_key_recursive_failed_to_delete(self):
|
||||
"""
|
||||
Test the delete_key_recursive function on failure to delete a key
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value"
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"Deleted": [],
|
||||
"Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Unknown error"],
|
||||
}
|
||||
# pylint: disable=undefined-variable
|
||||
mock_error = MagicMock(side_effect=WindowsError("Unknown error"))
|
||||
# pylint: enable=undefined-variable
|
||||
with patch("salt.utils.win_reg.win32api.RegDeleteKey", mock_error):
|
||||
self.assertDictEqual(
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_delete_key_recursive_unicode(self):
|
||||
"""
|
||||
Test the delete_key_recursive function on value within a unicode key
|
||||
"""
|
||||
try:
|
||||
self.assertTrue(
|
||||
win_reg.set_value(
|
||||
hive="HKLM",
|
||||
key="\\".join([FAKE_KEY, UNICODE_KEY]),
|
||||
vname="fake_name",
|
||||
vdata="fake_value",
|
||||
)
|
||||
)
|
||||
expected = {
|
||||
"Deleted": ["\\".join(["HKLM", FAKE_KEY, UNICODE_KEY])],
|
||||
"Failed": [],
|
||||
}
|
||||
self.assertDictEqual(
|
||||
win_reg.delete_key_recursive(
|
||||
hive="HKLM", key="\\".join([FAKE_KEY, UNICODE_KEY])
|
||||
),
|
||||
expected,
|
||||
)
|
||||
finally:
|
||||
win_reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)
|
||||
|
||||
def test__to_unicode_int(self):
|
||||
"""
|
||||
Test the ``_to_unicode`` function when it receives an integer value.
|
||||
Should return a unicode value, which is unicode in PY2 and str in PY3.
|
||||
"""
|
||||
self.assertTrue(isinstance(win_reg._to_unicode(1), str))
|
|
@ -4,6 +4,7 @@ import tools.changelog
import tools.ci
import tools.docs
import tools.pkg
import tools.pre_commit
import tools.vm

for name in ("boto3", "botocore", "urllib3"):
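The only change in this hunk is the new `import tools.pre_commit` line. A minimal sketch of why the bare import is enough (an inference from the rest of this diff, where each tools module calls `command_group(...)` at module level, presumably registering its commands with the `tools` CLI as an import side effect):

# Hypothetical excerpt: importing the module executes its top-level
# command_group(...) call; the import exists only for that side effect.
import tools.pre_commit  # noqa: F401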
@ -9,6 +9,7 @@ import logging
import os
import pathlib
import subprocess
import sys
import textwrap

from ptscripts import Context, command_group
@ -18,26 +19,41 @@ log = logging.getLogger(__name__)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent

# Define the command group
cl = command_group(name="changelog", help="Changelog tools", description=__doc__)
cl = command_group(
    name="changelog",
    help="Changelog tools",
    description=__doc__,
    venv_config={
        "requirements_files": [
            REPO_ROOT
            / "requirements"
            / "static"
            / "ci"
            / "py{}.{}".format(*sys.version_info)
            / "changelog.txt"
        ],
    },
)


def changelog(version):
def _get_changelog_contents(ctx: Context, version: str):
    """
    Return the full changelog generated by towncrier.
    """
    return subprocess.run(
        ["towncrier", "build", "--draft", f"--version={version}"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=True,
    return ctx.run(
        "towncrier",
        "build",
        "--draft",
        f"--version={version}",
        capture=True,
    ).stdout.decode()


def pkg_changelog(version):
def _get_pkg_changelog_contents(ctx: Context, version: str):
    """
    Return a version of the changelog entries suitable for packaged changelogs.
    """
    changes = changelog(version)
    changes = _get_changelog_contents(ctx, version)
    changes = "\n".join(changes.split("\n")[2:])
    changes = changes.replace(
        textwrap.dedent(
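Two things change in this hunk: the `changelog` command group now declares a `venv_config`, which by all appearances lets ptscripts provision a virtualenv from the pinned `changelog.txt` requirements before the group's commands run, and the towncrier helpers move from raw `subprocess.run` to `ctx.run(..., capture=True)`. A minimal sketch of the latter pattern in isolation (the group and command names here are illustrative, not from this diff):

from ptscripts import Context, command_group

demo = command_group(name="demo", help="Demo commands", description=__doc__)


@demo.command(name="git-head")
def git_head(ctx: Context):
    # capture=True buffers the subprocess output; .stdout is bytes,
    # hence the .decode() seen throughout the refactored helpers.
    ret = ctx.run("git", "rev-parse", "HEAD", capture=True)
    ctx.info(ret.stdout.decode().strip())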
@ -92,7 +108,7 @@ def pkg_changelog(version):
    return changes


def version():
def _get_salt_version():
    return (
        subprocess.run(
            ["python3", "salt/version.py"], stdout=subprocess.PIPE, check=True
@ -120,8 +136,8 @@ def version():
)
def update_rpm(ctx: Context, salt_version: str, draft: bool = False):
    if salt_version is None:
        salt_version = version()
    changes = pkg_changelog(salt_version)
        salt_version = _get_salt_version()
    changes = _get_pkg_changelog_contents(ctx, salt_version)
    ctx.info("Salt version is %s", salt_version)
    orig = ctx.run(
        "sed",
@ -170,8 +186,8 @@ def update_rpm(ctx: Context, salt_version: str, draft: bool = False):
)
def update_deb(ctx: Context, salt_version: str, draft: bool = False):
    if salt_version is None:
        salt_version = version()
    changes = pkg_changelog(salt_version)
        salt_version = _get_salt_version()
    changes = _get_pkg_changelog_contents(ctx, salt_version)
    formated = "\n".join([f" {_.replace('-', '*', 1)}" for _ in changes.split("\n")])
    dt = datetime.datetime.utcnow()
    date = dt.strftime("%a, %d %b %Y %H:%M:%S +0000")
@ -213,14 +229,14 @@ def update_deb(ctx: Context, salt_version: str, draft: bool = False):
)
def update_release_notes(ctx: Context, salt_version: str, draft: bool = False):
    if salt_version is None:
        salt_version = version()
        salt_version = _get_salt_version()
    if "+" in salt_version:
        major_version = salt_version.split("+", 1)[0]
    else:
        major_version = salt_version
    changes = changelog(salt_version)
    changes = _get_changelog_contents(ctx, salt_version)
    changes = "\n".join(changes.split("\n")[2:])
    tmpnotes = f"doc/topics/releases/{version}.rst.tmp"
    tmpnotes = f"doc/topics/releases/{salt_version}.rst.tmp"
    try:
        with open(f"doc/topics/releases/{major_version}.rst") as rfp:
            existing = rfp.read()
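Beyond the helper renames, the `tmpnotes` line fixes a real bug: the old f-string interpolated the function object `version`, not a version string, so the temp file was named after the function's repr. A quick illustrative reproduction (the function body here is invented for the demo):

def version():
    return "3006.0"

# Old form: embeds the function's repr, not its return value.
print(f"doc/topics/releases/{version}.rst.tmp")
# -> doc/topics/releases/<function version at 0x...>.rst.tmp
print(f"doc/topics/releases/{version()}.rst.tmp")
# -> doc/topics/releases/3006.0.rst.tmp

The same class of fix reappears in `generate_changelog_md` in the next hunk, where `f"--version={version}"` becomes `f"--version={salt_version}"`.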
@ -258,8 +274,8 @@ def update_release_notes(ctx: Context, salt_version: str, draft: bool = False):
)
def generate_changelog_md(ctx: Context, salt_version: str, draft: bool = False):
    if salt_version is None:
        salt_version = version()
    cmd = ["towncrier", "build", f"--version={version}"]
        salt_version = _get_salt_version()
    cmd = ["towncrier", "build", f"--version={salt_version}"]
    if draft:
        cmd += ["--draft"]
    else:
@ -8,6 +8,7 @@ import logging
import os
import pathlib
import shutil
import sys

from ptscripts import Context, command_group

@ -16,10 +17,24 @@ log = logging.getLogger(__name__)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent

# Define the command group
doc = command_group(name="docs", help="Manpages tools", description=__doc__)
docs = command_group(
    name="docs",
    help="Manpages tools",
    description=__doc__,
    venv_config={
        "requirements_files": [
            REPO_ROOT
            / "requirements"
            / "static"
            / "ci"
            / "py{}.{}".format(*sys.version_info)
            / "docs.txt"
        ],
    },
)


@doc.command(
@docs.command(
    name="man",
)
def man(ctx: Context):
@ -30,7 +45,7 @@ def man(ctx: Context):
        shutil.copy(os.path.join(root, file), os.path.join("doc/man", file))


@doc.command(
@docs.command(
    name="html",
)
def html(ctx: Context):
@ -38,7 +53,7 @@ def html(ctx: Context):
    ctx.run("make", "html", "SHPINXOPTS=-W", cwd="doc/", check=True)


@doc.command(
@docs.command(
    name="epub",
)
def epub(ctx: Context):
@ -46,7 +61,7 @@ def epub(ctx: Context):
    ctx.run("make", "epub", "SHPINXOPTS=-W", cwd="doc/", check=True)


@doc.command(
@docs.command(
    name="pdf",
)
def pdf(ctx: Context):
14
tools/pkg.py
@ -284,6 +284,11 @@ def generate_hashes(ctx: Context, files: list[pathlib.Path]):

@pkg.command(
    name="source-tarball",
    venv_config={
        "requirements_files": [
            REPO_ROOT / "requirements" / "build.txt",
        ]
    },
)
def source_tarball(ctx: Context):
    shutil.rmtree("dist/", ignore_errors=True)
@ -295,7 +300,12 @@ def source_tarball(ctx: Context):
        "HEAD",
        capture=True,
    ).stdout.strip()
    env = {**os.environ, **{"SOURCE_DATE_EPOCH": str(timestamp)}}
    env = {
        **os.environ,
        **{
            "SOURCE_DATE_EPOCH": str(timestamp),
        },
    }
    ctx.run(
        "python3",
        "-m",
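The reshaped `env` dict is cosmetic, but the variable itself is the interesting part: `SOURCE_DATE_EPOCH` is the convention reproducible-build tooling honors for clamping embedded timestamps, and here it is apparently pinned to a timestamp captured from `git` at `HEAD` just above. A standalone sketch of the same idea (the `%at` format and the `python3 -m build --sdist` invocation are assumptions; the diff truncates the actual commands):

import os
import subprocess

# Last commit's timestamp, so repeated builds of the same commit agree.
timestamp = subprocess.run(
    ["git", "show", "-s", "--format=%at", "HEAD"],
    stdout=subprocess.PIPE,
    check=True,
).stdout.decode().strip()

env = {**os.environ, "SOURCE_DATE_EPOCH": timestamp}
subprocess.run(["python3", "-m", "build", "--sdist"], env=env, check=True)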
@ -308,7 +318,7 @@ def source_tarball(ctx: Context):
    # Recreate sdist to be reproducible
    recompress = Recompress(timestamp)
    for targz in REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
        ctx.info("Re-compressing %s...", targz.relative_to(REPO_ROOT))
        ctx.info(f"Re-compressing {targz.relative_to(REPO_ROOT)} ...")
        recompress.recompress(targz)
    sha256sum = shutil.which("sha256sum")
    if sha256sum:
105
tools/pre_commit.py
Normal file
@ -0,0 +1,105 @@
"""
These commands are used by pre-commit.
"""
# pylint: disable=resource-leakage,broad-except
from __future__ import annotations

import logging
import pathlib
import shutil

from jinja2 import Environment, FileSystemLoader
from ptscripts import Context, command_group

log = logging.getLogger(__name__)

REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
WORKFLOWS = REPO_ROOT / ".github" / "workflows"
TEMPLATES = WORKFLOWS / "templates"

# Define the command group
cgroup = command_group(
    name="pre-commit", help="Pre-Commit Related Commands", description=__doc__
)


class NoDuplicatesList(list):
    def append(self, need):
        if need not in self:
            super().append(need)


@cgroup.command(
    name="generate-workflows",
)
def generate_workflows(ctx: Context):
    """
    Generate GitHub Actions Workflows
    """
    workflows = {
        "CI": {
            "template": "ci.yml",
        },
        "Scheduled": {
            "template": "scheduled.yml",
        },
    }
    env = Environment(
        block_start_string="<%",
        block_end_string="%>",
        variable_start_string="<{",
        variable_end_string="}>",
        extensions=[
            "jinja2.ext.do",
        ],
        loader=FileSystemLoader(str(TEMPLATES)),
    )
    for workflow_name, details in workflows.items():
        template = details["template"]
        workflow_path = WORKFLOWS / template
        template_path = TEMPLATES / f"{template}.j2"
        ctx.info(
            f"Generating '{workflow_path.relative_to(REPO_ROOT)}' from "
            f"template '{template_path.relative_to(REPO_ROOT)}' ..."
        )
        context = {
            "template": template_path.relative_to(REPO_ROOT),
            "workflow_name": workflow_name,
            "conclusion_needs": NoDuplicatesList(),
        }
        loaded_template = env.get_template(f"{template}.j2")
        rendered_template = loaded_template.render(**context)
        workflow_path.write_text(rendered_template.rstrip() + "\n")


@cgroup.command(
    name="actionlint",
    arguments={
        "files": {
            "help": "Files to run actionlint against",
            "nargs": "*",
        },
        "no_color": {
            "help": "Disable colors in output",
        },
    },
)
def actionlint(ctx: Context, files: list[str], no_color: bool = False):
    """
    Run `actionlint`
    """
    actionlint = shutil.which("actionlint")
    if not actionlint:
        ctx.warn("Could not find the 'actionlint' binary")
        ctx.exit(0)
    cmdline = [actionlint]
    if no_color is False:
        cmdline.append("-color")
    shellcheck = shutil.which("shellcheck")
    if shellcheck:
        cmdline.append(f"-shellcheck={shellcheck}")
    pyflakes = shutil.which("pyflakes")
    if pyflakes:
        cmdline.append(f"-pyflakes={pyflakes}")
    ret = ctx.run(*cmdline, *files, check=False)
    ctx.exit(ret.returncode)
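One design choice in `generate_workflows` is worth calling out: the Jinja Environment swaps the default `{{ }}`/`{% %}` delimiters for `<{ }>`/`<% %>`, which lets templates emit GitHub Actions' own `${{ ... }}` expressions literally instead of having Jinja try to evaluate them. A minimal, self-contained illustration (the template string is invented for the example):

from jinja2 import Environment

env = Environment(
    block_start_string="<%",
    block_end_string="%>",
    variable_start_string="<{",
    variable_end_string="}>",
)
# "<{ workflow_name }>" is rendered by Jinja; "${{ github.sha }}" passes
# through untouched, which is exactly what a workflow template needs.
tmpl = env.from_string("name: <{ workflow_name }>\nsha: ${{ github.sha }}")
print(tmpl.render(workflow_name="CI"))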