Merge pull request #66654 from dwoz/merge/3007.x/3006.x

[3007.x] Merge forward 3006.x into 3007.x
Daniel Wozniak 2024-06-27 05:39:18 -07:00 committed by GitHub
commit 847c94b56d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
163 changed files with 4133 additions and 1452 deletions

.github/CODEOWNERS

@ -9,55 +9,4 @@
# This file uses an fnmatch-style matching pattern.
# Team Core
* @saltstack/team-core
# Team Boto
salt/*/*boto* @saltstack/team-core
# Team Cloud
salt/cloud/* @saltstack/team-core
salt/utils/openstack/* @saltstack/team-core
salt/utils/aws.py @saltstack/team-core
salt/*/*cloud* @saltstack/team-core
# Team NetAPI
salt/cli/api.py @saltstack/team-core
salt/client/netapi.py @saltstack/team-core
salt/netapi/* @saltstack/team-core
# Team Network
salt/proxy/* @saltstack/team-core
# Team SPM
salt/cli/spm.py @saltstack/team-core
salt/spm/* @saltstack/team-core
# Team SSH
salt/cli/ssh.py @saltstack/team-core
salt/client/ssh/* @saltstack/team-core
salt/roster/* @saltstack/team-core
salt/runners/ssh.py @saltstack/team-core
salt/*/thin.py @saltstack/team-core
# Team State
salt/state.py @saltstack/team-core
# Team SUSE
salt/*/*btrfs* @saltstack/team-core
salt/*/*kubernetes* @saltstack/team-core
salt/*/*pkg* @saltstack/team-core
salt/*/*snapper* @saltstack/team-core
salt/*/*xfs* @saltstack/team-core
salt/*/*zypper* @saltstack/team-core
# Team Transport
salt/transport/* @saltstack/team-core
salt/utils/zeromq.py @saltstack/team-core
# Team Windows
salt/*/*win* @saltstack/team-core
salt/modules/reg.py @saltstack/team-core
salt/states/reg.py @saltstack/team-core
tests/*/*win* @saltstack/team-core
tests/*/test_reg.py @saltstack/team-core
tests/pytests/* @saltstack/team-core @s0undt3ch
* @saltstack/salt-core-maintainers


@ -40,8 +40,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -52,6 +53,8 @@ jobs:
runs-on: ubuntu-latest
outputs:
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"
@ -66,6 +69,8 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
@ -123,7 +128,7 @@ jobs:
- name: PyPi Proxy
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
@ -195,6 +200,8 @@ jobs:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }}
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"
@ -321,7 +328,7 @@ jobs:
- name: PyPi Proxy
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'


@ -32,8 +32,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
jobs:
@ -98,6 +99,7 @@ jobs:
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
env:
USE_S3_CACHE: 'false'
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"
@ -148,6 +150,7 @@ jobs:
runs-on: windows-latest
env:
USE_S3_CACHE: 'false'
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"


@ -17,8 +17,7 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: https://pypi.org/simple
PIP_DISABLE_PIP_VERSION_CHECK: "1"
jobs:


@ -39,8 +39,9 @@ on:
env:
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
jobs:
@ -54,7 +55,8 @@ jobs:
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
source:
- ${{ inputs.source }}
env:
PIP_INDEX_URL: https://pypi.org/simple
runs-on:
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
@ -289,7 +291,7 @@ jobs:
- ${{ inputs.source }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9
image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
steps:
- uses: actions/checkout@v4
@ -375,6 +377,7 @@ jobs:
SM_CLIENT_CERT_PASSWORD: "${{ secrets.WIN_SIGN_CERT_PASSWORD }}"
SM_CLIENT_CERT_FILE_B64: "${{ secrets.WIN_SIGN_CERT_FILE_B64 }}"
WIN_SIGN_CERT_SHA1_HASH: "${{ secrets.WIN_SIGN_CERT_SHA1_HASH }}"
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: Check Package Signing Enabled


@ -32,8 +32,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
jobs:
@ -102,6 +103,8 @@ jobs:
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }}
runs-on:
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }}
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"
@ -156,6 +159,9 @@ jobs:
- x86
- amd64
runs-on: windows-latest
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"


@ -437,7 +437,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-salt-onedir:
@ -453,7 +453,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-pkgs-onedir:
@ -466,7 +466,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "onedir"
@ -480,7 +480,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "src"
build-ci-deps:
@ -666,27 +666,6 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
@ -1579,27 +1558,6 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
centos-7:
name: CentOS 7 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
@ -2028,6 +1986,8 @@ jobs:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
runs-on: ubuntu-latest
env:
PIP_INDEX_URL: https://pypi.org/simple
needs:
- prepare-workflow
- build-ci-deps
@ -2046,7 +2006,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -2214,7 +2173,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -2243,7 +2201,6 @@ jobs:
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
- amazonlinux-2023-arm64-pkg-tests
- centos-7-pkg-tests
- debian-11-pkg-tests
- debian-11-arm64-pkg-tests
- debian-12-pkg-tests


@ -11,8 +11,7 @@ on:
env:
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: https://pypi.org/simple
PIP_DISABLE_PIP_VERSION_CHECK: "1"
@ -23,7 +22,7 @@ jobs:
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}
container:
image: ghcr.io/saltstack/salt-ci-containers/python:3.9
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
steps:
- name: Install System Deps
@ -67,7 +66,7 @@ jobs:
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }}
container:
image: ghcr.io/saltstack/salt-ci-containers/python:3.8
image: ghcr.io/saltstack/salt-ci-containers/python:3.10
steps:
- name: Install System Deps


@ -494,7 +494,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-salt-onedir:
@ -510,7 +510,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-pkgs-onedir:
@ -523,7 +523,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "onedir"
environment: nightly
@ -541,7 +541,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "src"
environment: nightly
@ -731,27 +731,6 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1644,27 +1623,6 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
centos-7:
name: CentOS 7 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2093,6 +2051,8 @@ jobs:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
runs-on: ubuntu-latest
env:
PIP_INDEX_URL: https://pypi.org/simple
needs:
- prepare-workflow
- build-ci-deps
@ -2111,7 +2071,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -2558,18 +2517,6 @@ jobs:
distro: photon
version: "5"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "7"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "8"
@ -3031,7 +2978,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -3118,7 +3064,6 @@ jobs:
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
- amazonlinux-2023-arm64-pkg-tests
- centos-7-pkg-tests
- debian-11-pkg-tests
- debian-11-arm64-pkg-tests
- debian-12-pkg-tests


@ -19,8 +19,9 @@ on:
env:
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
permissions:


@ -20,8 +20,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
jobs:
upload-virustotal:


@ -484,7 +484,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-salt-onedir:
@ -500,7 +500,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-pkgs-onedir:
@ -513,7 +513,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "onedir"
@ -527,7 +527,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "src"
build-ci-deps:
@ -713,27 +713,6 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1626,27 +1605,6 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
centos-7:
name: CentOS 7 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2075,6 +2033,8 @@ jobs:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
runs-on: ubuntu-latest
env:
PIP_INDEX_URL: https://pypi.org/simple
needs:
- prepare-workflow
- build-ci-deps
@ -2093,7 +2053,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -2263,7 +2222,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -2292,7 +2250,6 @@ jobs:
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
- amazonlinux-2023-arm64-pkg-tests
- centos-7-pkg-tests
- debian-11-pkg-tests
- debian-11-arm64-pkg-tests
- debian-12-pkg-tests


@ -476,7 +476,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-salt-onedir:
@ -492,7 +492,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
build-pkgs-onedir:
@ -505,7 +505,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "onedir"
environment: staging
@ -523,7 +523,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.16.0"
relenv-version: "0.16.1"
python-version: "3.10.14"
source: "src"
environment: staging
@ -713,27 +713,6 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
centos-7-pkg-tests:
name: CentOS 7 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
debian-11-pkg-tests:
name: Debian 11 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -1626,27 +1605,6 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
centos-7:
name: CentOS 7 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: centos-7
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2381,18 +2339,6 @@ jobs:
distro: photon
version: "5"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "7"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "8"
@ -2988,7 +2934,6 @@ jobs:
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- centos-7
- debian-11
- debian-11-arm64
- debian-12
@ -3017,7 +2962,6 @@ jobs:
- amazonlinux-2-arm64-pkg-tests
- amazonlinux-2023-pkg-tests
- amazonlinux-2023-arm64-pkg-tests
- centos-7-pkg-tests
- debian-11-pkg-tests
- debian-11-arm64-pkg-tests
- debian-12-pkg-tests


@ -311,6 +311,8 @@
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
runs-on: ubuntu-latest
env:
PIP_INDEX_URL: https://pypi.org/simple
needs:
- prepare-workflow
<%- for need in test_salt_needs.iter(consume=False) %>


@ -70,8 +70,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -97,6 +98,8 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
@ -159,7 +162,7 @@ jobs:
- name: PyPi Proxy
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -307,6 +310,8 @@ jobs:
needs:
- test
- generate-matrix
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: Checkout Source Code


@ -67,8 +67,9 @@ on:
env:
COLUMNS: 190
PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/"
PIP_EXTRA_INDEX_URL: "https://pypi.org/simple"
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -94,6 +95,8 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
@ -162,6 +165,8 @@ jobs:
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Decompress .nox Directory
run: |
@ -335,6 +340,8 @@ jobs:
needs:
- test
- generate-matrix
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: Checkout Source Code


@ -70,8 +70,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -97,6 +98,8 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
@ -159,7 +162,7 @@ jobs:
- name: PyPi Proxy
run: |
sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
@ -308,6 +311,8 @@ jobs:
needs:
- test
- generate-matrix
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: Checkout Source Code


@ -48,8 +48,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -74,6 +75,8 @@ jobs:
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
@ -296,6 +299,7 @@ jobs:
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
env:
USE_S3_CACHE: 'false'
PIP_INDEX_URL: https://pypi.org/simple
environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:


@ -65,8 +65,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
USE_S3_CACHE: 'true'


@ -62,8 +62,9 @@ on:
env:
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -162,6 +163,8 @@ jobs:
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
@ -263,3 +266,5 @@ jobs:
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple


@ -65,8 +65,9 @@ env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"


@ -4,77 +4,65 @@
- saltproject-security.pdl@broadcom.com
**GPG key ID:**
- 4EA0793D
- 37654A06
**GPG key fingerprint:**
- `8ABE 4EFC F0F4 B24B FF2A AF90 D570 F2D3 4EA0 793D`
- `99EF 26F2 6469 2D24 973A 7007 E8BF 76A7 3765 4A06`
**GPG Public Key**
```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFO15mMBEADa3CfQwk5ED9wAQ8fFDku277CegG3U1hVGdcxqKNvucblwoKCb
hRK6u9ihgaO9V9duV2glwgjytiBI/z6lyWqdaD37YXG/gTL+9Md+qdSDeaOa/9eg
7y+g4P+FvU9HWUlujRVlofUn5Dj/IZgUywbxwEybutuzvvFVTzsn+DFVwTH34Qoh
QIuNzQCSEz3Lhh8zq9LqkNy91ZZQO1ZIUrypafspH6GBHHcE8msBFgYiNBnVcUFH
u0r4j1Rav+621EtD5GZsOt05+NJI8pkaC/dDKjURcuiV6bhmeSpNzLaXUhwx6f29
Vhag5JhVGGNQxlRTxNEM86HEFp+4zJQ8m/wRDrGX5IAHsdESdhP+ljDVlAAX/ttP
/Ucl2fgpTnDKVHOA00E515Q87ZHv6awJ3GL1veqi8zfsLaag7rw1TuuHyGLOPkDt
t5PAjsS9R3KI7pGnhqI6bTOi591odUdgzUhZChWUUX1VStiIDi2jCvyoOOLMOGS5
AEYXuWYP7KgujZCDRaTNqRDdgPd93Mh9JI8UmkzXDUgijdzVpzPjYgFaWtyK8lsc
Fizqe3/Yzf9RCVX/lmRbiEH+ql/zSxcWlBQd17PKaL+TisQFXcmQzccYgAxFbj2r
QHp5ABEu9YjFme2Jzun7Mv9V4qo3JF5dmnUk31yupZeAOGZkirIsaWC3hwARAQAB
tDBTYWx0U3RhY2sgU2VjdXJpdHkgVGVhbSA8c2VjdXJpdHlAc2FsdHN0YWNrLmNv
bT6JAj4EEwECACgFAlO15mMCGwMFCQeGH4AGCwkIBwMCBhUIAgkKCwQWAgMBAh4B
AheAAAoJENVw8tNOoHk9z/MP/2vzY27fmVxU5X8joiiturjlgEqQw41IYEmWv1Bw
4WVXYCHP1yu/1MC1uuvOmOd5BlI8YO2C2oyW7d1B0NorguPtz55b7jabCElekVCh
h/H4ZVThiwqgPpthRv/2npXjIm7SLSs/kuaXo6Qy2JpszwDVFw+xCRVL0tH9KJxz
HuNBeVq7abWD5fzIWkmGM9hicG/R2D0RIlco1Q0VNKy8klG+pOFOW886KnwkSPc7
JUYp1oUlHsSlhTmkLEG54cyVzrTP/XuZuyMTdtyTc3mfgW0adneAL6MARtC5UB/h
q+v9dqMf4iD3wY6ctu8KWE8Vo5MUEsNNO9EA2dUR88LwFZ3ZnnXdQkizgR/Aa515
dm17vlNkSoomYCo84eN7GOTfxWcq+iXYSWcKWT4X+h/ra+LmNndQWQBRebVUtbKE
ZDwKmiQz/5LY5EhlWcuU4lVmMSFpWXt5FR/PtzgTdZAo9QKkBjcv97LYbXvsPI69
El1BLAg+m+1UpE1L7zJT1il6PqVyEFAWBxW46wXCCkGssFsvz2yRp0PDX8A6u4yq
rTkt09uYht1is61joLDJ/kq3+6k8gJWkDOW+2NMrmf+/qcdYCMYXmrtOpg/wF27W
GMNAkbdyzgeX/MbUBCGCMdzhevRuivOI5bu4vT5s3KdshG+yhzV45bapKRd5VN+1
mZRqiQJVBBMBCAA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw
9LJL/yqvkNVw8tNOoHk9BQJe1uRXBQkPoTz0AAoJENVw8tNOoHk9akAQANKIDIBY
J3DmWH3g6rWURdREQcBVfMkw6j5MHlIEwlGrN3whSaPv2KR3tatRccBCQ0olQeYb
ZeFtPuf0Du+LqGaAePo5DkPNU7GHoba2+ZE/sJ4wZ4CzAQM6+LvH2iLHeLZ1VLlu
ZEftxD1RFKTqpnav8KiyYGkeFuEn4eMSIhbudp/8wkN40sCWL22D141EhVSRvLlO
BMUpTWdtSYTg0F2pgQL5U2A56syuiwUwPXzQb45JEJILmG8zkeJB9s8kGtErypIH
P+qxJXq24woGUFeJjiLdiOhI6/YoVBACUkKmig36CGf/DH5NAeQECeZq3YBNp7XK
tsF1dPitxuTM/UkOHoHUnGhDlBcQMWe9WuBK4rA+7GH9NT8o7M6+2OKhk181tJ+s
Y2kP7RSXOV162thRsNvVImXajAIFTR3ksEDFGVq/4jh85jFoIbNH3x27NxOu6e2p
OIkXNXmSFXLUmwbfEfIk06gqP3xzkaj+eWHcLDkn9bUKblBJhHdhf9Vsy/N2NRW2
23c64qDutw1NX7msDuN3KXisim+isBzPVVzymkkhkXK+UpjrRR0ePvph3fnGf1bc
NipVtn1KKM7kurSrSjFVLwLi52SGnEHKJnbbhh+AKV09SNYi6IaKL8yw8c1d0K80
PlBaJEvkC6myzaaRtYcna4pbiIysBaZtwDOOuQINBFO15mMBEAC5UuLii9ZLz6qH
fIJp35IOW9U8SOf7QFhzXR7NZ3DmJsd3f6Nb/habQFIHjm3K9wbpj+FvaW2oWRlF
VvYdzjUq6c82GUUjW1dnqgUvFwdmM8351n0YQ2TonmyaF882RvsRZrbJ65uvy7SQ
xlouXaAYOdqwLsPxBEOyOnMPSktW5V2UIWyxsNP3sADchWIGq9p5D3Y/loyIMsS1
dj+TjoQZOKSj7CuRT98+8yhGAY8YBEXu9r3I9o6mDkuPpAljuMc8r09Im6az2egt
K/szKt4Hy1bpSSBZU4W/XR7XwQNywmb3wxjmYT6Od3Mwj0jtzc3gQiH8hcEy3+BO
+NNmyzFVyIwOLziwjmEcw62S57wYKUVnHD2nglMsQa8Ve0e6ABBMEY7zGEGStva5
9rfgeh0jUMJiccGiUDTMs0tdkC6knYKbu/fdRqNYFoNuDcSeLEw4DdCuP01l2W4y
Y+fiK6hAcL25amjzc+yYo9eaaqTn6RATbzdhHQZdpAMxY+vNT0+NhP1Zo5gYBMR6
5Zp/VhFsf67ijb03FUtdw9N8dHwiR2m8vVA8kO/gCD6wS2p9RdXqrJ9JhnHYWjiV
uXR+f755ZAndyQfRtowMdQIoiXuJEXYw6XN+/BX81gJaynJYc0uw0MnxWQX+A5m8
HqEsbIFUXBYXPgbwXTm7c4IHGgXXdwARAQABiQI8BBgBCAAmAhsMFiEEir5O/PD0
skv/Kq+Q1XDy006geT0FAl7W5K0FCQ+hPUoACgkQ1XDy006geT1Q0Q//atnw1D4J
13nL8Mygk+ANY4Xljub/TeZqKtzmnWGso843XysErLH1adCu1KDX1Dj4/o3WoPOt
0O78uSS81N428ocOPKx+fA63n7q1mRqHHy6pLLVKoT66tmvE1ZN0ObaiPK9IxZkB
ThGlHJk9VaUg0vzAaRznogWeBh1dyZktVrtbUO5u4xDX9iql/unVmCWm+U1R7t4q
fqPEbk8ZnWc7x4bAZf8/vSQ93mAbpnRRuJdDK9tsiuhl8pRz7OyzvMS81rVF75ja
7CcShPofrW4yZ7FqAUMwTbfrvsAraWmDjW17Ao7C2dUA9ViwSKJ6u6Pd5no/hwbm
jVoxtO2RvjGOBxKneD36uENAUMBExjDTkSHmOxUYSknrEKUy7P1OL2ZHLG8/rouN
5ZvIxHiMkz12ukSt29IHvCngn1UB4/7+tvDHqug4ZAZPuwH7TC5Hk6WO0OoK8Eb2
sQa2QoehQjwK0IakGd5kFEqKgbrwYPPa3my7l58nOZmPHdMcTOzgKvUEYAITjsT4
oOtocs9Nj+cfCfp6YUn6JeYfiHs+Xhze5igdWIl0ZO5rTmbqcD8A1URKBds0WA+G
FLP9shPC0rS/L3Y1fKhqAc0h+znWBU6xjipTkmzh3FdM8gGT6g9YwGQNbi/x47k5
vtBIWO4LPeGEvb2Gs65PL2eouOqU6yvBr5Y=
=F/97
mQINBGZpxDsBEACz8yoRBXaJiifaWz3wd4FLSO18mgH7H/+0iNTbV1ZwhgGEtWTF
Z31HfrsbxVgICoMgFYt8WKnc4MHZLIgDfTuCFQpf7PV/VqRBAknZwQKEAjHfrYNz
Q1vy3CeKC1qcKQISEQr7VFf58sOC8GJ54jLLc2rCsg9cXI6yvUFtGwL9Qv7g/NZn
rtLjc4NZIKdIvSt+/PtooQtsz0jfLMdMpMFa41keH3MknIbydBUnGj7eC8ANN/iD
Re2QHAW2KfQh3Ocuh/DpJ0/dwbzXmXfMWHk30E+s31TfdLiFt1Iz5kZDF8iHrDMq
x39/GGmF10y5rfq43V1Ucxm+1tl5Km0JcX6GpPUtgRpfUYAxwxfGfezt4PjYRYH2
mNxXXPLsnVTvdWPTvS0msSrcTHmnU5His38I6goXI7dLZm0saqoWi3sqEQ8TPS6/
DkLtYjpb/+dql+KrXD7erd3j8KKflIXn7AEsv+luNk6czGOKgdG9agkklzOHfEPc
xOGmaFfe/1mu8HxgaCuhNAQWlk79ZC+GAm0sBZIQAQRtABgag5vWr16hVix7BPMG
Fp8+caOVv6qfQ7gBmJ3/aso6OzyOxsluVxQRt94EjPTm0xuwb1aYNJOhEj9cPkjQ
XBjo3KN0rwcAViR/fdUzrIV1sn2hms0v5WZ+TDtz1w0OpLZOwe23BDE1+QARAQAB
tEJTYWx0IFByb2plY3QgU2VjdXJpdHkgVGVhbSA8c2FsdHByb2plY3Qtc2VjdXJp
dHkucGRsQGJyb2FkY29tLmNvbT6JAlcEEwEKAEEWIQSZ7ybyZGktJJc6cAfov3an
N2VKBgUCZmnEOwIbAwUJB4TOAAULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAK
CRDov3anN2VKBk7rD/9QdcYdNGfk96W906HlVpb3JCwT0t9T7ElP97Ot0YN6LqMj
vVQpxWYi7riUSyt1FtlCAM+hmghImzILF9LKDRCZ1H5UStI/u9T53cZpUZtVW/8R
bUNBCl495UcgioIZG5DsfZ/GdBOgY+hQfdgh7HC8a8A/owCt2hHbnth970NQ+LHb
/0ERLfOHRxozgPBhze8Vqf939KlteM5ljgTw/IkJJIsxJi4C6pQntSHvB3/Bq/Nw
Kf3vk3XYFtVibeQODSVvc6useo+SNGV/wsK/6kvh/vfP9Trv/GMOn/89Bj2aL1PR
M382E6sDB9d22p4ehVgbcOpkwHtr9DGerK9xzfG4aUjLu9qVD5Ep3gqKSsCe+P8z
bpADdVCnk+Vdp3Bi+KI7buSkqfbZ0m9vCY3ei1fMiDiTTjvNliL5QCO6PvYNYiDw
+LLImrQThv55ZRQsRRT7J6A94kwDoI6zcBEalv/aPws0nQHJtgWRUpmy5RcbVu9Z
QBXlUpCzCB+gGaGRE1u0hCfuvkbcG1pXFFBdSUuAK4o4ktiRALVUndELic/PU1nR
jwo/+j0SGw/jTwqVChUfLDZbiAQ2JICoVpZ+e1zQfsxa/yDu2e4D543SvNFHDsxh
bsBeCsopzJSA0n2HAdYvPxOPoWVvZv+U8ZV3EEVOUgsO5//cRJddCgLU89Q4DrkC
DQRmacQ7ARAAsz8jnpfw3DCRxdCVGiqWAtgj8r2gx5n1wJsKsgvyGQdKUtPwlX04
7w13lIDT2DwoXFozquYsTn9XkIoWbVckqo0NN/V7/QxIZIYTqRcFXouHTbXDJm5C
tsvfDlnTsaplyRawPU2mhYg39/lzIt8zIjvy5zo/pElkRP5m03nG+ItrsHN6CCvf
ZiRxme6EQdn+aoHh2GtICL8+c3HvQzTHYKxFn84Ibt3uNxwt+Mu6YhG9tkYMQQk5
SkYA4CYAaw2Lc/g0ee36iqw/5d79M8YcQtHhy5zzqgdEvExjFPdowV1hhFIEkNkM
uqIAknXVesqLLw2hPeYmyhYQqeBKIrWmBhBKX9c0vMYkDDH3T/sSylVhH0QAXP6E
WmLja3E1ov6pt6j7j/wWzC9LSMFDJI2yWCeOE1oea5D89tH6XvsGRTiog62zF/9a
77197iIa0+o91chp4iLkzDvuK8pVujPx8bNsK8jlJ+OW73NmliCVg+hecoFLNsri
/TsBngFNVcu79Q1XfyvoDdR2C09ItCBEZGt6LOlq/+ATUw1aBz6L1hvLBtiR3Hfu
X31YlbxdvVPjlzg6O6GXSfnokNTWv2mVXWTRIrP0RrKvMyiNPXVW7EunUuXI0Axk
Xg3E5kAjKXkBXzoCTCVz/sXPLjvjI0x3Z7obgPpcTi9h5DIX6PFyK/kAEQEAAYkC
PAQYAQoAJhYhBJnvJvJkaS0klzpwB+i/dqc3ZUoGBQJmacQ7AhsMBQkHhM4AAAoJ
EOi/dqc3ZUoGDeAQAKbyiHA1sl0fnvcZxoZ3mWA/Qesddp7Nv2aEW8I3hAJoTVml
ZvMxk8leZgsQJtSsVDNnxeyW+WCIUkhxmd95UlkTTj5mpyci1YrxAltPJ2TWioLe
F2doP8Y+4iGnaV+ApzWG33sLr95z37RKVdMuGk/O5nLMeWnSPA7HHWJCxECMm0SH
uI8aby8w2aBZ1kOMFB/ToEEzLBu9fk+zCzG3uH8QhdciMENVhsyBSULIrmwKglyI
VQwj2dXHyekQh7QEHV+CdKMfs3ZOANwm52OwjaK0dVb3IMFGvlUf4UXXfcXwLAkj
vW+Ju4kLGxVQpOlh1EBain9WOaHZGh6EGuTpjJO32PyRq8iSMNb8coeonoPFWrE/
A5dy3z5x5CZhJ6kyNwYs/9951r30Ct9qNZo9WZwp8AGQVs+J9XEYnZIWXnO1hdKs
dRStPvY7VqS500t8eWqWRfCLgofZAb9Fv7SwTPQ2G7bOuTXmQKAIEkU9vzo5XACu
AtR/9bC9ghNnlNuH4xiViBclrq2dif/I2ZwItpQHjuCDeMKz9kdADRI0tuNPpRHe
QP1YpURW+I+PYZzNgbnwzl6Bxo7jCHFgG6BQ0ih5sVwEDhlXjSejd8CNMYEy3ElL
xJLUpltwXLZSrJEXYjtJtnh0om71NXes0OyWE1cL4+U6WA9Hho6xedjk2bai
=pPmt
-----END PGP PUBLIC KEY BLOCK-----
```

changelog/57649.fixed.md

@ -0,0 +1 @@
Update to include croniter in pkg requirements

changelog/64300.fixed.md

@ -0,0 +1 @@
Fix utf8 handling in 'pass' renderer

changelog/65251.fixed.md

@ -0,0 +1 @@
Fix config.items when called on minion

changelog/65304.fixed.md

@ -0,0 +1 @@
Fix pkg.installed state aggregation not honoring the require requisite

changelog/65816.fixed.md

@ -0,0 +1 @@
Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM

changelog/66414.fixed.md

@ -0,0 +1 @@
Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key

changelog/66579.fixed.md

@ -0,0 +1 @@
Fix support for FIPS approved encryption and signing algorithms.

changelog/66604.fixed.md

@ -0,0 +1 @@
Fix RPM package provides


@ -0,0 +1 @@
Drop CentOS 7 support

changelog/66624.added.md

@ -0,0 +1 @@
Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9)


@ -0,0 +1 @@
No longer build RPM packages with CentOS Stream 9

changelog/66632.fixed.md

@ -0,0 +1 @@
Upgrade relenv to 0.16.1. This release fixes several package install issues for salt-pip.


@ -49,26 +49,6 @@
"is_windows": "false",
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-0ef52419c91cb0169",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-0973c8d1b91dcba5c",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "centos"
},
"debian-11-arm64": {
"ami": "ami-0eff227d9a94d8692",
"ami_description": "CI Image of Debian 11 arm64",


@ -1,6 +1,6 @@
nox_version: "2022.8.7"
python_version: "3.10.14"
relenv_version: "0.16.0"
relenv_version: "0.16.1"
release_branches:
- "3006.x"
- "3007.x"


@ -2080,6 +2080,20 @@ The number of seconds between AES key rotations on the master.
.. conf_master:: ssl
``publish_signing_algorithm``
-----------------------------
.. versionadded:: 3006.9
Default: PKCS1v15-SHA1
The RSA signing algorithm used by this master when signing payloads sent
over the publish channel. Valid values are ``PKCS1v15-SHA1`` and
``PKCS1v15-SHA224``. Minions must be at version ``3006.9`` or greater if this
is changed from the default setting.
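For example, a deployment that wants the stronger digest could set the new option in the master configuration once every minion is on 3006.9 or newer (a minimal sketch; only ``publish_signing_algorithm`` and its values come from the documentation above, the file path is just the conventional default):

```yaml
# /etc/salt/master (sketch)
# Sign publish-channel payloads with SHA-224 instead of the SHA-1 default.
# All connected minions must be version 3006.9 or newer.
publish_signing_algorithm: PKCS1v15-SHA224
```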
``ssl``
-------


@ -3170,6 +3170,28 @@ constant names without ssl module prefix: ``CERT_REQUIRED`` or ``PROTOCOL_SSLv23
certfile: <path_to_certfile>
ssl_version: PROTOCOL_TLSv1_2
``encryption_algorithm``
------------------------
.. versionadded:: 3006.9
Default: OAEP-SHA1
The RSA encryption algorithm used by this minion when connecting to the
master's request channel. Valid values are ``OAEP-SHA1`` and ``OAEP-SHA224``.
``signing_algorithm``
------------------------
.. versionadded:: 3006.9
Default: PKCS1v15-SHA1
The RSA signing algorithm used by this minion when connecting to the
master's request channel. Valid values are ``PKCS1v15-SHA1`` and
``PKCS1v15-SHA224``.
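A minion opting into the SHA-224 variants on its side would set both new options, for example (a minimal sketch based only on the option names and values documented above):

```yaml
# /etc/salt/minion (sketch)
# RSA algorithms this minion uses on the master's request channel.
encryption_algorithm: OAEP-SHA224
signing_algorithm: PKCS1v15-SHA224
```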
Reactor Settings
================


@ -5,8 +5,8 @@ Security disclosure policy
==========================
:email: saltproject-security.pdl@broadcom.com
:gpg key ID: 4EA0793D
:gpg key fingerprint: ``8ABE 4EFC F0F4 B24B FF2A AF90 D570 F2D3 4EA0 793D``
:gpg key ID: 37654A06
:gpg key fingerprint: ``99EF 26F2 6469 2D24 973A 7007 E8BF 76A7 3765 4A06``
**gpg public key:**
@ -14,104 +14,55 @@ Security disclosure policy
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFO15mMBEADa3CfQwk5ED9wAQ8fFDku277CegG3U1hVGdcxqKNvucblwoKCb
hRK6u9ihgaO9V9duV2glwgjytiBI/z6lyWqdaD37YXG/gTL+9Md+qdSDeaOa/9eg
7y+g4P+FvU9HWUlujRVlofUn5Dj/IZgUywbxwEybutuzvvFVTzsn+DFVwTH34Qoh
QIuNzQCSEz3Lhh8zq9LqkNy91ZZQO1ZIUrypafspH6GBHHcE8msBFgYiNBnVcUFH
u0r4j1Rav+621EtD5GZsOt05+NJI8pkaC/dDKjURcuiV6bhmeSpNzLaXUhwx6f29
Vhag5JhVGGNQxlRTxNEM86HEFp+4zJQ8m/wRDrGX5IAHsdESdhP+ljDVlAAX/ttP
/Ucl2fgpTnDKVHOA00E515Q87ZHv6awJ3GL1veqi8zfsLaag7rw1TuuHyGLOPkDt
t5PAjsS9R3KI7pGnhqI6bTOi591odUdgzUhZChWUUX1VStiIDi2jCvyoOOLMOGS5
AEYXuWYP7KgujZCDRaTNqRDdgPd93Mh9JI8UmkzXDUgijdzVpzPjYgFaWtyK8lsc
Fizqe3/Yzf9RCVX/lmRbiEH+ql/zSxcWlBQd17PKaL+TisQFXcmQzccYgAxFbj2r
QHp5ABEu9YjFme2Jzun7Mv9V4qo3JF5dmnUk31yupZeAOGZkirIsaWC3hwARAQAB
tDBTYWx0U3RhY2sgU2VjdXJpdHkgVGVhbSA8c2VjdXJpdHlAc2FsdHN0YWNrLmNv
bT6JAj4EEwECACgFAlO15mMCGwMFCQeGH4AGCwkIBwMCBhUIAgkKCwQWAgMBAh4B
AheAAAoJENVw8tNOoHk9z/MP/2vzY27fmVxU5X8joiiturjlgEqQw41IYEmWv1Bw
4WVXYCHP1yu/1MC1uuvOmOd5BlI8YO2C2oyW7d1B0NorguPtz55b7jabCElekVCh
h/H4ZVThiwqgPpthRv/2npXjIm7SLSs/kuaXo6Qy2JpszwDVFw+xCRVL0tH9KJxz
HuNBeVq7abWD5fzIWkmGM9hicG/R2D0RIlco1Q0VNKy8klG+pOFOW886KnwkSPc7
JUYp1oUlHsSlhTmkLEG54cyVzrTP/XuZuyMTdtyTc3mfgW0adneAL6MARtC5UB/h
q+v9dqMf4iD3wY6ctu8KWE8Vo5MUEsNNO9EA2dUR88LwFZ3ZnnXdQkizgR/Aa515
dm17vlNkSoomYCo84eN7GOTfxWcq+iXYSWcKWT4X+h/ra+LmNndQWQBRebVUtbKE
ZDwKmiQz/5LY5EhlWcuU4lVmMSFpWXt5FR/PtzgTdZAo9QKkBjcv97LYbXvsPI69
El1BLAg+m+1UpE1L7zJT1il6PqVyEFAWBxW46wXCCkGssFsvz2yRp0PDX8A6u4yq
rTkt09uYht1is61joLDJ/kq3+6k8gJWkDOW+2NMrmf+/qcdYCMYXmrtOpg/wF27W
GMNAkbdyzgeX/MbUBCGCMdzhevRuivOI5bu4vT5s3KdshG+yhzV45bapKRd5VN+1
mZRqiQJVBBMBAgA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw
9LJL/yqvkNVw8tNOoHk9BQJb0e5rBQkL3m8IAAoJENVw8tNOoHk9fzMP/ApQtkQD
BmoYEBTF6BH1bywzDw5OHpnBSLbuoYtA3gkhnm/83MzFDcGn22pgo2Fv0MuHltWI
G2oExzje7szmcM6Xg3ZTKapJ3/p2J+P33tkJA1LWpg+DdgdQlqrjlXKwEnikszuB
9IMhbjoPeBzwiUtsBQmcwbVgwMzbscwoV5DJ/gLDCkgF4rp2uKEYAcBi8s9NGX6p
zQsb9Sb0/bKdCrszAcvUn4WYB6WbAPttvutYHtg/nZfXEeX/SgBueXo3lO9vzFlO
r3Zgk7WeucsEqa9Qo0VLOq28HykixM5mEJKsAQrNIqM1DqXgfDch8RJAHzgMBHFH
Qi9hJXk1/6OA2FPXQGcA9Td5Dt0i1Z7wMrAUMj3s9gNMVCD0hQqEKfUtpyV7KBAj
AO5j8Wr8KafnRm6czBCkcV0SRzHQSHdYyncozWwPgWOaRC9AY9fEDz8lBaSoB/C+
dyO/xZMTWoaWqkHozVoHIrCc4CAtZTye/5mxFhq15Q1Iy/NjelrMTCD1kql1dNIP
oOgfOYl1xLMQIBwrrCrgeRIvxEgKRf9KOLbSrS7+3vOKoxf+LD4AQfLci8dFyH+I
t0Z43nk93yTOI82RTdz5GwUXIKcvGhsJ8bgNlGTxM1R/Sl8Sg8diE2PRAp/fk7+g
CwOM8VkeyrDM2k1cy64d8USkbR7YtT3otyFQiQJVBBMBCAA/AhsDBgsJCAcDAgYV
CAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw9LJL/yqvkNVw8tNOoHk9BQJeapbNBQkN
v4KKAAoJENVw8tNOoHk9BFQP/04a1yQb3aOYbNgx+ER9l54wZbUUlReU+ujmlW03
12ZW8fFZ0SN2q7xKtE/I9nNl1gjJ7NHTP3FhZ0eNyG+mJeGyrscVKxaAkTV+71e3
7n94/qC2bM753X+2160eR7Md+R/itoljStwmib1583rSTTUld1i4FnUTrEhF7MBt
I/+5l7vUK4Hj1RPovHVeHXYfdbrS6wCBi6GsdOfYGfGacZIfM4XLXTkyjVt4Zg0j
rwZ36P1amHky1QyvQ2stkXjCEtP04h3o3EfC1yupNXarO1VXj10/wWYhoGAz6AT2
Usk6DiaiJqHPy2RwPfKzv7ZrUlMxKrqjPUHcoBf++EjzFtR3LJ0pY2fLwp6Pk4s4
18Xwi7r16HnCH/BZgqZVyXAhDV6+U9rAHab/n4b0hcWWaT2SIhsyZKtEMiTMJeq5
aAMcRSWX+dHO+MzMIBzNu7BO3b+zODD0+XSMsPqeHp3cqfZ3EHobKQPPFucdfjug
Hx2+dbPD3IwJVIilc9Otfz/+JYG4im5p4N6UCwXHbtiuuREC1SQpU9BqEjQAyIiL
gXlE5MSVqXijkrIpYB+K8cR+44nQ4K2kc4ievNqXR6D7XQ3AE76QN84Lby2b5W86
bbboIy0Bgy+9jgCx0CS7fk1P8zx1dw2FNDVfxZ+s473ZvwP1wdSRZICjZUvM8hx4
4kPCiQJVBBMBCAA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw
9LJL/yqvkNVw8tNOoHk9BQJiOkMeBQkUJ/c7AAoJENVw8tNOoHk9Xx8P/26W8v/v
Exmttzcqh7MlihddXfr2lughSuUBQ8aLsffGHSGIgyqSPlq0Fl5qOCoJ8hYZSBqV
yEfo7iRY7E3K1LGXKDkpup9hC1wMjR0A25eoXwEnD2vEQ/upXXueH05vkcMc165B
cK0kNxas+2amCc3nHJOlfWILXQk4OS+nB0lBWe8H96ppfAaX/G0JiYsa0hjNycZq
0ftEdCkAJRvSFuu6d3gXH69KLxoNcJOE+99f3wMOuOcX3Xf1k/cwqdJRdEiW8oz8
Gf5ZRzWcpsXXg6nB2mkahLoRDMM2U+1C6fHbUg4yTvU1AB+F/OYqe1d0hedho0o5
+WWoTuM/U79+m3NM14qvr0iJP7ytABiEE96nNAz+Q0NDZqA6JoUd7obo8KVjGHEt
9bRl/8K/zWkdNLoF84tWjEiBCzCKXGEay7lgiIx5f3OvP91CfGL+ILHrk/AZR1eE
M+KI7wB8sJEFF95UoKVua3YzLIFScB4bUEOg6bz8xSSP4a0BWktSm5ws8iCWqOE6
S9haCppZ7a6k5czQNPJV2bp2eTS4ykFAQLv/mHMS5awIvb8b630Rufn1vZHKCrMf
WdSbBZD7oojxYo1psPlfzN2KUrNXgl7vAUNagJEogMoiYAZ2ML7rTVAC1qnbxQb+
DeC+r0I98AIY6igIgRbcybH3ccfXYNtcxLUJuQINBFO15mMBEAC5UuLii9ZLz6qH
fIJp35IOW9U8SOf7QFhzXR7NZ3DmJsd3f6Nb/habQFIHjm3K9wbpj+FvaW2oWRlF
VvYdzjUq6c82GUUjW1dnqgUvFwdmM8351n0YQ2TonmyaF882RvsRZrbJ65uvy7SQ
xlouXaAYOdqwLsPxBEOyOnMPSktW5V2UIWyxsNP3sADchWIGq9p5D3Y/loyIMsS1
dj+TjoQZOKSj7CuRT98+8yhGAY8YBEXu9r3I9o6mDkuPpAljuMc8r09Im6az2egt
K/szKt4Hy1bpSSBZU4W/XR7XwQNywmb3wxjmYT6Od3Mwj0jtzc3gQiH8hcEy3+BO
+NNmyzFVyIwOLziwjmEcw62S57wYKUVnHD2nglMsQa8Ve0e6ABBMEY7zGEGStva5
9rfgeh0jUMJiccGiUDTMs0tdkC6knYKbu/fdRqNYFoNuDcSeLEw4DdCuP01l2W4y
Y+fiK6hAcL25amjzc+yYo9eaaqTn6RATbzdhHQZdpAMxY+vNT0+NhP1Zo5gYBMR6
5Zp/VhFsf67ijb03FUtdw9N8dHwiR2m8vVA8kO/gCD6wS2p9RdXqrJ9JhnHYWjiV
uXR+f755ZAndyQfRtowMdQIoiXuJEXYw6XN+/BX81gJaynJYc0uw0MnxWQX+A5m8
HqEsbIFUXBYXPgbwXTm7c4IHGgXXdwARAQABiQI8BBgBAgAmAhsMFiEEir5O/PD0
skv/Kq+Q1XDy006geT0FAlvR7oMFCQvebyAACgkQ1XDy006geT2Hxw//Zha8j8Uc
4B+DmHhZIvPmHp9aFI4DWhC7CBDrYKztBz42H6eX+UsBu4p+uBDKdW9xJH+Qt/zF
nf/zB5Bhc/wFceVRCAkWxPdiIQeo5XQGjZeORjle7E9iunTko+5q1q9I7IgqWYrn
jRmulDvRhO7AoUrqGACDrV6t0F1/XPB8seR2i6axFmFlt1qBHasRq11yksdgNYiD
KXaovf7csDGPGOCWEKMX7BFGpdK/dWdNYfH0Arfom0U5TqNfvGtP4yRPx2bcs7/1
VXPj7IqhBgOtA9pwtMjFki8HGkqj7bB2ErFBOnSwqqNnNcbnhiO6D74SHVGAHhKZ
whaMPDg76EvjAezoLHg7KWYOyUkWJSLa+YoM9r4+PJuEuW/XuaZCNbrAhek+p3pD
ywhElvZe/2UFk619qKzwSbTzk7a90rxLQ2wwtd0vxAW/GyjWl4/kOMZhI5+LAk1l
REucE0fSQxzCTeXu2ObvFR9ic02IYGH3Koz8CrGReEI1J05041Y5IhKxdsvGOD2W
e7ymcblYW4Gz8eYFlLeNJkj/38R7qmNZ028XHzAZDCAWDiTFrnCoglyk+U0JRHfg
HTsdvoc8mBdT/s24LhnfAbpLizlrZZquuOF6NLQSkbuLtmIwf+h9ynEEJxEkGGWg
7JqB1tMjNHLkRpveO/DTYB+iffpba1nCgumJAjwEGAEIACYCGwwWIQSKvk788PSy
S/8qr5DVcPLTTqB5PQUCYjpDOQUJFCf3VgAKCRDVcPLTTqB5PYDiEADaj1aAdXDb
+XrlhzlGCT3e16RDiE4BjSD1KHZX8ZDABI79JDG0iMN2PpWuViXq7AvWuwgNYdac
WjHsZGgHW82UoPVGKnfEVjjf0lQQIIcgdS5dEV8LamkeIo4vKUX/MZY+Mivk6luP
vCec9Euj/XU1nY6gGq6inpwDtZkNoJlCBune/IIGS82dU8RrSGAHNRZoaDJfdfQm
j7YAOWCUqyzn747yMyuMUOc15iJIgOz1dKN5YwDmFkzjlw+616Aswcp8UA0OfOQ+
e4THli32BgKTSNeOGhGgx1xCDkt+0gP1L0L2Sqhlr6BnqNF65mQ4j2v6UGY1noCo
jYxFchoa1zEdEiZRr/sRO91XlJtK7HyIAI0cUHKVU+Cayoh//OBQBJnbeZlfh9Qn
4ead1pTz9bcKIeZleAjlzNG249bGY+82WsFghb4/7U9MYJVePz0m1zJKPkdABZ+R
lSDvhf4ImesfH5UuofZFv1UXmQL4yV7PDXXdy2xhma7YLznyZTUobDoJiZbuO72O
g5HJCpYoNfvGx++Z9naomUWufqi9PWigEMxU8lUtiGaLQrDW3inTOZTTmTnsJiAI
Lhku0Jr4SjCqxoEFydXOGvNV5XB4WXvf+A6JhcZI+/S72ai1CeSgMFiJLAEb2MZ+
fwPKmQ2cKnCBs5ASj1DkgUcz2c8DTUPVqg==
=i1Tf
mQINBGZpxDsBEACz8yoRBXaJiifaWz3wd4FLSO18mgH7H/+0iNTbV1ZwhgGEtWTF
Z31HfrsbxVgICoMgFYt8WKnc4MHZLIgDfTuCFQpf7PV/VqRBAknZwQKEAjHfrYNz
Q1vy3CeKC1qcKQISEQr7VFf58sOC8GJ54jLLc2rCsg9cXI6yvUFtGwL9Qv7g/NZn
rtLjc4NZIKdIvSt+/PtooQtsz0jfLMdMpMFa41keH3MknIbydBUnGj7eC8ANN/iD
Re2QHAW2KfQh3Ocuh/DpJ0/dwbzXmXfMWHk30E+s31TfdLiFt1Iz5kZDF8iHrDMq
x39/GGmF10y5rfq43V1Ucxm+1tl5Km0JcX6GpPUtgRpfUYAxwxfGfezt4PjYRYH2
mNxXXPLsnVTvdWPTvS0msSrcTHmnU5His38I6goXI7dLZm0saqoWi3sqEQ8TPS6/
DkLtYjpb/+dql+KrXD7erd3j8KKflIXn7AEsv+luNk6czGOKgdG9agkklzOHfEPc
xOGmaFfe/1mu8HxgaCuhNAQWlk79ZC+GAm0sBZIQAQRtABgag5vWr16hVix7BPMG
Fp8+caOVv6qfQ7gBmJ3/aso6OzyOxsluVxQRt94EjPTm0xuwb1aYNJOhEj9cPkjQ
XBjo3KN0rwcAViR/fdUzrIV1sn2hms0v5WZ+TDtz1w0OpLZOwe23BDE1+QARAQAB
tEJTYWx0IFByb2plY3QgU2VjdXJpdHkgVGVhbSA8c2FsdHByb2plY3Qtc2VjdXJp
dHkucGRsQGJyb2FkY29tLmNvbT6JAlcEEwEKAEEWIQSZ7ybyZGktJJc6cAfov3an
N2VKBgUCZmnEOwIbAwUJB4TOAAULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAK
CRDov3anN2VKBk7rD/9QdcYdNGfk96W906HlVpb3JCwT0t9T7ElP97Ot0YN6LqMj
vVQpxWYi7riUSyt1FtlCAM+hmghImzILF9LKDRCZ1H5UStI/u9T53cZpUZtVW/8R
bUNBCl495UcgioIZG5DsfZ/GdBOgY+hQfdgh7HC8a8A/owCt2hHbnth970NQ+LHb
/0ERLfOHRxozgPBhze8Vqf939KlteM5ljgTw/IkJJIsxJi4C6pQntSHvB3/Bq/Nw
Kf3vk3XYFtVibeQODSVvc6useo+SNGV/wsK/6kvh/vfP9Trv/GMOn/89Bj2aL1PR
M382E6sDB9d22p4ehVgbcOpkwHtr9DGerK9xzfG4aUjLu9qVD5Ep3gqKSsCe+P8z
bpADdVCnk+Vdp3Bi+KI7buSkqfbZ0m9vCY3ei1fMiDiTTjvNliL5QCO6PvYNYiDw
+LLImrQThv55ZRQsRRT7J6A94kwDoI6zcBEalv/aPws0nQHJtgWRUpmy5RcbVu9Z
QBXlUpCzCB+gGaGRE1u0hCfuvkbcG1pXFFBdSUuAK4o4ktiRALVUndELic/PU1nR
jwo/+j0SGw/jTwqVChUfLDZbiAQ2JICoVpZ+e1zQfsxa/yDu2e4D543SvNFHDsxh
bsBeCsopzJSA0n2HAdYvPxOPoWVvZv+U8ZV3EEVOUgsO5//cRJddCgLU89Q4DrkC
DQRmacQ7ARAAsz8jnpfw3DCRxdCVGiqWAtgj8r2gx5n1wJsKsgvyGQdKUtPwlX04
7w13lIDT2DwoXFozquYsTn9XkIoWbVckqo0NN/V7/QxIZIYTqRcFXouHTbXDJm5C
tsvfDlnTsaplyRawPU2mhYg39/lzIt8zIjvy5zo/pElkRP5m03nG+ItrsHN6CCvf
ZiRxme6EQdn+aoHh2GtICL8+c3HvQzTHYKxFn84Ibt3uNxwt+Mu6YhG9tkYMQQk5
SkYA4CYAaw2Lc/g0ee36iqw/5d79M8YcQtHhy5zzqgdEvExjFPdowV1hhFIEkNkM
uqIAknXVesqLLw2hPeYmyhYQqeBKIrWmBhBKX9c0vMYkDDH3T/sSylVhH0QAXP6E
WmLja3E1ov6pt6j7j/wWzC9LSMFDJI2yWCeOE1oea5D89tH6XvsGRTiog62zF/9a
77197iIa0+o91chp4iLkzDvuK8pVujPx8bNsK8jlJ+OW73NmliCVg+hecoFLNsri
/TsBngFNVcu79Q1XfyvoDdR2C09ItCBEZGt6LOlq/+ATUw1aBz6L1hvLBtiR3Hfu
X31YlbxdvVPjlzg6O6GXSfnokNTWv2mVXWTRIrP0RrKvMyiNPXVW7EunUuXI0Axk
Xg3E5kAjKXkBXzoCTCVz/sXPLjvjI0x3Z7obgPpcTi9h5DIX6PFyK/kAEQEAAYkC
PAQYAQoAJhYhBJnvJvJkaS0klzpwB+i/dqc3ZUoGBQJmacQ7AhsMBQkHhM4AAAoJ
EOi/dqc3ZUoGDeAQAKbyiHA1sl0fnvcZxoZ3mWA/Qesddp7Nv2aEW8I3hAJoTVml
ZvMxk8leZgsQJtSsVDNnxeyW+WCIUkhxmd95UlkTTj5mpyci1YrxAltPJ2TWioLe
F2doP8Y+4iGnaV+ApzWG33sLr95z37RKVdMuGk/O5nLMeWnSPA7HHWJCxECMm0SH
uI8aby8w2aBZ1kOMFB/ToEEzLBu9fk+zCzG3uH8QhdciMENVhsyBSULIrmwKglyI
VQwj2dXHyekQh7QEHV+CdKMfs3ZOANwm52OwjaK0dVb3IMFGvlUf4UXXfcXwLAkj
vW+Ju4kLGxVQpOlh1EBain9WOaHZGh6EGuTpjJO32PyRq8iSMNb8coeonoPFWrE/
A5dy3z5x5CZhJ6kyNwYs/9951r30Ct9qNZo9WZwp8AGQVs+J9XEYnZIWXnO1hdKs
dRStPvY7VqS500t8eWqWRfCLgofZAb9Fv7SwTPQ2G7bOuTXmQKAIEkU9vzo5XACu
AtR/9bC9ghNnlNuH4xiViBclrq2dif/I2ZwItpQHjuCDeMKz9kdADRI0tuNPpRHe
QP1YpURW+I+PYZzNgbnwzl6Bxo7jCHFgG6BQ0ih5sVwEDhlXjSejd8CNMYEy3ElL
xJLUpltwXLZSrJEXYjtJtnh0om71NXes0OyWE1cL4+U6WA9Hho6xedjk2bai
=pPmt
-----END PGP PUBLIC KEY BLOCK-----
The SaltStack Security Team is available at saltproject-security.pdl@broadcom.com for


@ -10,8 +10,9 @@
%define __brp_python_hardlink /usr/bin/true
# Disable private libraries from showing in provides
%global __provides_exclude_from ^lib/.*\\.so.*$
%global __requires_exclude_from ^lib/.*\\.so.*$
%global __to_exclude .*\\.so.*
%global __provides_exclude_from ^.*$
%global __requires_exclude_from ^.*$
%define _source_payload w2.gzdio
%define _binary_payload w2.gzdio
%define _SALT_GROUP salt


@ -119,12 +119,12 @@ if ( $install_build_tools ) {
# Hash: 3b1efd3a66ea28b16697394703a72ca340a05bd5
if (! (Test-Path -Path Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5) ) {
Write-Host "Installing Certificate Sign Root Certificate: " -NoNewLine
Start-Process -FilePath "certutil" `
$proc = Start-Process -FilePath "certutil" `
-ArgumentList "-addstore", `
"Root", `
"$($env:TEMP)\build_tools\certificates\manifestCounterSignRootCertificate.cer" `
-Wait -WindowStyle Hidden
if ( Test-Path -Path Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5 ) {
-PassThru -Wait -WindowStyle Hidden
if ( $proc.ExitCode -eq 0 ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Yellow
@ -135,12 +135,12 @@ if ( $install_build_tools ) {
# Hash: 8f43288ad272f3103b6fb1428485ea3014c0bcfe
if (! (Test-Path -Path Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe) ) {
Write-Host "Installing Certificate Root Certificate: " -NoNewLine
Start-Process -FilePath "certutil" `
$proc = Start-Process -FilePath "certutil" `
-ArgumentList "-addstore", `
"Root", `
"$($env:TEMP)\build_tools\certificates\manifestRootCertificate.cer" `
-Wait -WindowStyle Hidden
if ( Test-Path -Path Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe ) {
-PassThru -Wait -WindowStyle Hidden
if ( $proc.ExitCode -eq 0 ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Yellow
@ -148,14 +148,13 @@ if ( $install_build_tools ) {
}
Write-Host "Installing Visual Studio 2017 build tools: " -NoNewline
Start-Process -FilePath "$env:TEMP\build_tools\vs_setup.exe" `
$proc = Start-Process -FilePath "$env:TEMP\build_tools\vs_setup.exe" `
-ArgumentList "--wait", "--noweb", "--quiet" `
-Wait
@($VS_CL_BIN, $MSBUILD_BIN, $WIN10_SDK_RC) | ForEach-Object {
if ( ! (Test-Path -Path $_) ) {
Write-Result "Failed" -ForegroundColor Red
exit 1
}
-Wait -PassThru
if ( $proc.ExitCode -eq 0 ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Yellow
}
Write-Result "Success" -ForegroundColor Green
} else {


@ -12,6 +12,7 @@ packaging>=21.3
looseversion
tornado>=6.3.3
aiohttp>=3.9.0
croniter>=0.3.0,!=0.3.22; sys_platform != 'win32'
# We need contextvars for salt-ssh.
# Even on python versions which ships with contextvars in the standard library!


@ -12,7 +12,6 @@ certifi>=2022.12.07
cffi>=1.14.6
cherrypy>=17.4.1
clustershell
croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32'
dnspython
etcd3-py==0.1.6
gitpython>=3.1.37


@ -99,8 +99,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
@ -414,6 +416,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in


@ -98,8 +98,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
@ -418,6 +420,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -111,8 +111,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
@ -456,6 +458,7 @@ python-telegram-bot==20.3
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# croniter
# tempora
# twilio
pyvmomi==8.0.1.0.1

View file

@ -94,8 +94,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
@ -407,6 +409,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -93,8 +93,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
@ -411,6 +413,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -106,8 +106,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
@ -447,6 +449,7 @@ python-telegram-bot==20.3
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# croniter
# tempora
# twilio
pyvmomi==8.0.1.0.1

View file

@ -125,10 +125,11 @@ contextvars==2.4
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/static/ci/common.in
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
@ -582,6 +583,7 @@ pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via

View file

@ -94,8 +94,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
@ -407,6 +409,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -53,6 +53,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/py3.12/linux.txt
@ -195,6 +199,7 @@ python-dateutil==2.8.2
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via
# -c requirements/static/ci/py3.12/linux.txt
@ -202,6 +207,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# croniter
# tempora
pyyaml==6.0.1
# via

View file

@ -93,8 +93,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
@ -411,6 +413,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -143,10 +143,11 @@ contextvars==2.4
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/static/ci/common.in
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
@ -570,6 +571,7 @@ pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# croniter
# tempora
# twilio
pyvmomi==8.0.1.0.1

View file

@ -106,8 +106,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
@ -447,6 +449,7 @@ python-telegram-bot==20.3
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# croniter
# tempora
# twilio
pyvmomi==8.0.1.0.1

View file

@ -98,8 +98,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
@ -422,6 +424,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -107,8 +107,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
@ -453,6 +455,7 @@ python-telegram-bot==20.3
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
# croniter
# tempora
# twilio
pyvmomi==8.0.1.0.1

View file

@ -99,8 +99,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
@ -414,6 +416,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -98,8 +98,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
@ -418,6 +420,7 @@ python-gnupg==0.5.2
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# croniter
# tempora
pyvmomi==8.0.1.0.1
# via -r requirements/static/ci/common.in

View file

@ -107,8 +107,10 @@ contextvars==2.4
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/base.txt
croniter==1.3.15 ; sys_platform != "win32"
# via -r requirements/static/ci/common.in
croniter==2.0.5 ; sys_platform != "win32"
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/base.txt
cryptography==42.0.5
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
@ -449,6 +451,7 @@ python-telegram-bot==20.3
pytz==2024.1
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# croniter
# tempora
# twilio
pyvmomi==8.0.1.0.1

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -98,11 +100,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -98,11 +100,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -98,11 +100,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -26,6 +26,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -96,11 +98,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -26,6 +26,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -96,11 +98,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -26,6 +26,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -96,11 +98,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -26,6 +26,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -96,11 +98,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -26,6 +26,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -96,11 +98,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -26,6 +26,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -96,11 +98,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -100,11 +102,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -100,11 +102,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -98,11 +100,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -98,11 +100,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -28,6 +28,8 @@ cherrypy==18.8.0
# via -r requirements/base.txt
contextvars==2.4
# via -r requirements/base.txt
croniter==2.0.5 ; sys_platform != "win32"
# via -r requirements/base.txt
cryptography==42.0.5
# via
# -r requirements/base.txt
@ -98,11 +100,15 @@ pydantic==2.6.4
pyopenssl==24.0.0
# via -r requirements/base.txt
python-dateutil==2.8.2
# via -r requirements/base.txt
# via
# -r requirements/base.txt
# croniter
python-gnupg==0.5.2
# via -r requirements/base.txt
pytz==2024.1
# via tempora
# via
# croniter
# tempora
pyyaml==6.0.1
# via -r requirements/base.txt
pyzmq==25.1.2

View file

@ -24,21 +24,6 @@ import salt.utils.verify
import salt.utils.versions
from salt.utils.asynchronous import SyncWrapper
try:
from M2Crypto import RSA
HAS_M2 = True
except ImportError:
HAS_M2 = False
try:
from Cryptodome.Cipher import PKCS1_OAEP
except ImportError:
try:
from Crypto.Cipher import PKCS1_OAEP # nosec
except ImportError:
pass
log = logging.getLogger(__name__)
REQUEST_CHANNEL_TIMEOUT = 60
@ -168,11 +153,15 @@ class AsyncReqChannel:
return self.transport.ttype
def _package_load(self, load):
return {
ret = {
"enc": self.crypt,
"load": load,
"version": 2,
}
if self.crypt == "aes":
ret["enc_algo"] = self.opts["encryption_algorithm"]
ret["sig_algo"] = self.opts["signing_algorithm"]
return ret
@tornado.gen.coroutine
def _send_with_retry(self, load, tries, timeout):
@ -223,11 +212,7 @@ class AsyncReqChannel:
tries,
timeout,
)
if HAS_M2:
aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding)
else:
cipher = PKCS1_OAEP.new(key) # pylint: disable=used-before-assignment
aes = cipher.decrypt(ret["key"])
aes = key.decrypt(ret["key"], self.opts["encryption_algorithm"])
# Decrypt using the public key.
pcrypt = salt.crypt.Crypticle(self.opts, aes)
@ -250,7 +235,9 @@ class AsyncReqChannel:
raise tornado.gen.Return(data["pillar"])
def verify_signature(self, data, sig):
return salt.crypt.verify_signature(self.master_pubkey_path, data, sig)
return salt.crypt.PublicKey(self.master_pubkey_path).verify(
data, sig, self.opts["signing_algorithm"]
)
@tornado.gen.coroutine
def _crypted_transfer(self, load, timeout, raw=False):
@ -594,7 +581,10 @@ class AsyncPubChannel:
# Verify that the signature is valid
if not salt.crypt.verify_signature(
self.master_pubkey_path, payload["load"], payload.get("sig")
self.master_pubkey_path,
payload["load"],
payload.get("sig"),
algorithm=payload["sig_algo"],
):
raise salt.crypt.AuthenticationError(
"Message signature failed to validate."

View file

@ -26,21 +26,9 @@ import salt.utils.minions
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.verify
from salt.exceptions import SaltDeserializationError
from salt.exceptions import SaltDeserializationError, UnsupportedAlgorithm
from salt.utils.cache import CacheCli
try:
from M2Crypto import RSA
HAS_M2 = True
except ImportError:
HAS_M2 = False
try:
from Cryptodome.Cipher import PKCS1_OAEP
except ImportError:
from Crypto.Cipher import PKCS1_OAEP # nosec
log = logging.getLogger(__name__)
@ -197,13 +185,24 @@ class ReqServerChannel:
req_opts["tgt"],
nonce,
sign_messages,
payload.get("enc_algo", salt.crypt.OAEP_SHA1),
payload.get("sig_algo", salt.crypt.PKCS1v15_SHA1),
),
)
log.error("Unknown req_fun %s", req_fun)
# always attempt to return an error to the minion
raise tornado.gen.Return("Server-side exception handling payload")
def _encrypt_private(self, ret, dictkey, target, nonce=None, sign_messages=True):
def _encrypt_private(
self,
ret,
dictkey,
target,
nonce=None,
sign_messages=True,
encryption_algorithm=salt.crypt.OAEP_SHA1,
signing_algorithm=salt.crypt.PKCS1v15_SHA1,
):
"""
The server equivalent of ReqChannel.crypted_transfer_decode_dictentry
"""
@ -222,7 +221,7 @@ class ReqServerChannel:
log.error("AES key not found")
return {"error": "AES key not found"}
pret = {}
pret["key"] = pub.encrypt(key)
pret["key"] = pub.encrypt(key, encryption_algorithm)
if ret is False:
ret = {}
if sign_messages:
@ -233,20 +232,31 @@ class ReqServerChannel:
)
signed_msg = {
"data": tosign,
"sig": salt.crypt.PrivateKey(self.master_key.rsa_path).sign(tosign),
"sig": salt.crypt.PrivateKey(self.master_key.rsa_path).sign(
tosign, algorithm=signing_algorithm
),
}
pret[dictkey] = pcrypt.dumps(signed_msg)
else:
pret[dictkey] = pcrypt.dumps(ret)
return pret
def _clear_signed(self, load):
tosign = salt.payload.dumps(load)
return {
"enc": "clear",
"load": tosign,
"sig": salt.crypt.sign_message(self.master_key.rsa_path, tosign),
}
def _clear_signed(self, load, algorithm):
try:
tosign = salt.payload.dumps(load)
return {
"enc": "clear",
"load": tosign,
"sig": salt.crypt.PrivateKey(self.master_key.rsa_path).sign(
tosign, algorithm=algorithm
),
}
except UnsupportedAlgorithm:
log.info(
"Minion tried to authenticate with unsupported signing algorithm: %s",
algorithm,
)
return {"enc": "clear", "load": {"ret": "bad sig algo"}}
def _update_aes(self):
"""
@ -306,10 +316,15 @@ class ReqServerChannel:
"""
import salt.master
enc_algo = load.get("enc_algo", salt.crypt.OAEP_SHA1)
sig_algo = load.get("sig_algo", salt.crypt.PKCS1v15_SHA1)
if not salt.utils.verify.valid_id(self.opts, load["id"]):
log.info("Authentication request from invalid id %s", load["id"])
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
log.info("Authentication request from %s", load["id"])
@ -351,7 +366,7 @@ class ReqServerChannel:
)
if sign_messages:
return self._clear_signed(
{"ret": "full", "nonce": load["nonce"]}
{"ret": "full", "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": "full"}}
@ -385,7 +400,9 @@ class ReqServerChannel:
if self.opts.get("auth_events") is True:
self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth"))
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
elif os.path.isfile(pubfn):
@ -413,7 +430,7 @@ class ReqServerChannel:
)
if sign_messages:
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -427,7 +444,9 @@ class ReqServerChannel:
if self.opts.get("auth_events") is True:
self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth"))
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -462,7 +481,8 @@ class ReqServerChannel:
self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth"))
if sign_messages:
return self._clear_signed(
{"ret": key_result, "nonce": load["nonce"]}
{"ret": key_result, "nonce": load["nonce"]},
sig_algo,
)
else:
return {"enc": "clear", "load": {"ret": key_result}}
@ -490,7 +510,9 @@ class ReqServerChannel:
if self.opts.get("auth_events") is True:
self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth"))
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -522,7 +544,7 @@ class ReqServerChannel:
)
if sign_messages:
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -546,7 +568,7 @@ class ReqServerChannel:
)
if sign_messages:
return self._clear_signed(
{"ret": True, "nonce": load["nonce"]}
{"ret": True, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": True}}
@ -573,7 +595,7 @@ class ReqServerChannel:
)
if sign_messages:
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -587,7 +609,9 @@ class ReqServerChannel:
if self.opts.get("auth_events") is True:
self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth"))
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -609,7 +633,9 @@ class ReqServerChannel:
elif not load["pub"]:
log.error("Public key is empty: %s", load["id"])
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
@ -622,16 +648,16 @@ class ReqServerChannel:
# The key payload may sometimes be corrupt when using auto-accept
# and an empty request comes in
try:
pub = salt.crypt.get_rsa_pub_key(pubfn)
pub = salt.crypt.PublicKey(pubfn)
except salt.crypt.InvalidKeyError as err:
log.error('Corrupt public key "%s": %s', pubfn, err)
if sign_messages:
return self._clear_signed({"ret": False, "nonce": load["nonce"]})
return self._clear_signed(
{"ret": False, "nonce": load["nonce"]}, sig_algo
)
else:
return {"enc": "clear", "load": {"ret": False}}
if not HAS_M2:
cipher = PKCS1_OAEP.new(pub) # pylint: disable=used-before-assignment
ret = {
"enc": "pub",
"pub_key": self.master_key.get_pub_str(),
@ -654,69 +680,66 @@ class ReqServerChannel:
key_pass = salt.utils.sdb.sdb_get(
self.opts["signing_key_pass"], self.opts
)
log.debug("Signing master public key before sending")
pub_sign = salt.crypt.sign_message(
self.master_key.get_sign_paths()[1], ret["pub_key"], key_pass
self.master_key.get_sign_paths()[1],
ret["pub_key"],
key_pass,
algorithm=sig_algo,
)
ret.update({"pub_sig": binascii.b2a_base64(pub_sign)})
if not HAS_M2:
mcipher = PKCS1_OAEP.new(self.master_key.key)
if self.opts["auth_mode"] >= 2:
if "token" in load:
try:
if HAS_M2:
mtoken = self.master_key.key.private_decrypt(
load["token"], RSA.pkcs1_oaep_padding
)
else:
mtoken = mcipher.decrypt(load["token"])
aes = f"{self.aes_key}_|-{mtoken}"
except Exception: # pylint: disable=broad-except
mtoken = self.master_key.key.decrypt(load["token"], enc_algo)
aes = "{}_|-{}".format(
salt.master.SMaster.secrets["aes"]["secret"].value, mtoken
)
except UnsupportedAlgorithm as exc:
log.info(
"Minion %s tried to authenticate with unsupported encryption algorithm: %s",
load["id"],
enc_algo,
)
return {"enc": "clear", "load": {"ret": "bad enc algo"}}
except Exception as exc: # pylint: disable=broad-except
log.warning("Token failed to decrypt %s", exc)
# Token failed to decrypt, send back the salty bacon to
# support older minions
pass
else:
aes = self.aes_key
if HAS_M2:
ret["aes"] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding)
else:
ret["aes"] = cipher.encrypt(aes)
ret["aes"] = pub.encrypt(aes, enc_algo)
else:
if "token" in load:
try:
if HAS_M2:
mtoken = self.master_key.key.private_decrypt(
load["token"], RSA.pkcs1_oaep_padding
)
ret["token"] = pub.public_encrypt(
mtoken, RSA.pkcs1_oaep_padding
)
else:
mtoken = mcipher.decrypt(load["token"])
ret["token"] = cipher.encrypt(mtoken)
except Exception: # pylint: disable=broad-except
mtoken = self.master_key.key.decrypt(load["token"], enc_algo)
ret["token"] = pub.encrypt(mtoken, enc_algo)
except UnsupportedAlgorithm as exc:
log.info(
"Minion %s tried to authenticate with unsupported encryption algorithm: %s",
load["id"],
enc_algo,
)
return {"enc": "clear", "load": {"ret": "bad enc algo"}}
except Exception as exc: # pylint: disable=broad-except
# Token failed to decrypt, send back the salty bacon to
# support older minions
pass
log.warning("Token failed to decrypt: %r", exc)
aes = self.aes_key
if HAS_M2:
ret["aes"] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding)
else:
ret["aes"] = cipher.encrypt(aes)
ret["aes"] = pub.encrypt(aes, enc_algo)
# Be aggressive about the signature
digest = salt.utils.stringutils.to_bytes(hashlib.sha256(aes).hexdigest())
ret["sig"] = salt.crypt.private_encrypt(self.master_key.key, digest)
ret["sig"] = self.master_key.key.encrypt(digest)
eload = {"result": True, "act": "accept", "id": load["id"], "pub": load["pub"]}
if self.opts.get("auth_events") is True:
self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth"))
if sign_messages:
ret["nonce"] = load["nonce"]
return self._clear_signed(ret)
return self._clear_signed(ret, sig_algo)
return ret
def close(self):
@ -898,9 +921,11 @@ class PubServerChannel:
payload["load"] = crypticle.dumps(load)
if self.opts["sign_pub_messages"]:
log.debug("Signing data packet")
payload["sig_algo"] = self.opts["publish_signing_algorithm"]
payload["sig"] = salt.crypt.PrivateKey(
self.master_key.rsa_path,
).sign(payload["load"])
).sign(payload["load"], self.opts["publish_signing_algorithm"])
int_payload = {"payload": salt.payload.dumps(payload)}
# If topics are supported, target matching has to happen master side
@ -964,10 +989,8 @@ class MasterPubServerChannel:
hashlib.sha256(aes).hexdigest()
)
data["peers"][peer] = {
"aes": pub.encrypt(aes),
"sig": salt.crypt.private_encrypt(
self.master_key.master_key, digest
),
"aes": pub.encrypt(aes, algorithm="OAEP-SHA224"),
"sig": self.master_key.master_key.encrypt(digest),
}
else:
log.warning("Peer key missing %r", peer_pub)
@ -1024,7 +1047,7 @@ class MasterPubServerChannel:
self.pushers = []
self.auth_errors = {}
for peer in self.opts.get("cluster_peers", []):
pusher = salt.transport.tcp.TCPPublishServer(
pusher = salt.transport.tcp.PublishServer(
self.opts,
pull_host=peer,
pull_port=tcp_master_pool_port,
@ -1062,7 +1085,9 @@ class MasterPubServerChannel:
peer = data["peer_id"]
aes = data["peers"][self.opts["id"]]["aes"]
sig = data["peers"][self.opts["id"]]["sig"]
key_str = self.master_key.master_private_decrypt(aes)
key_str = self.master_key.master_key.decrypt(
aes, algorithm="OAEP-SHA224"
)
digest = salt.utils.stringutils.to_bytes(
hashlib.sha256(key_str).hexdigest()
)
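
The server side of the same negotiation: the master reads the algorithms out of the auth load, falling back to the SHA1 variants for older minions, and answers with a clear "bad enc algo" / "bad sig algo" payload when it cannot honor the request. A hedged sketch of that flow (the negotiate() helper and its arguments are illustrative; only the constants and the exception come from the diff):

    import salt.crypt
    from salt.exceptions import UnsupportedAlgorithm


    def negotiate(load, master_privkey, minion_pubkey):
        # Older minions send no algorithm fields; default to the SHA1 variants.
        enc_algo = load.get("enc_algo", salt.crypt.OAEP_SHA1)
        sig_algo = load.get("sig_algo", salt.crypt.PKCS1v15_SHA1)
        try:
            token = master_privkey.decrypt(load["token"], enc_algo)
        except UnsupportedAlgorithm:
            # Same shape as the new error path in the auth handler.
            return {"enc": "clear", "load": {"ret": "bad enc algo"}}
        return {"token": minion_pubkey.encrypt(token, enc_algo), "sig_algo": sig_algo}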

View file

@ -35,14 +35,6 @@ from salt.exceptions import (
)
from salt.template import compile_template
try:
import Cryptodome.Random
except ImportError:
try:
import Crypto.Random # nosec
except ImportError:
pass # pycrypto < 2.1
log = logging.getLogger(__name__)
@ -2288,8 +2280,6 @@ def create_multiprocessing(parallel_data, queue=None):
This function will be called from another process when running a map in
parallel mode. The result from the create is always a json object.
"""
salt.utils.crypt.reinit_crypto()
parallel_data["opts"]["output"] = "json"
cloud = Cloud(parallel_data["opts"])
try:
@ -2318,8 +2308,6 @@ def destroy_multiprocessing(parallel_data, queue=None):
This function will be called from another process when running a map in
parallel mode. The result from the destroy is always a json object.
"""
salt.utils.crypt.reinit_crypto()
parallel_data["opts"]["output"] = "json"
clouds = salt.loader.clouds(parallel_data["opts"])
@ -2350,8 +2338,6 @@ def run_parallel_map_providers_query(data, queue=None):
This function will be called from another process when building the
providers map.
"""
salt.utils.crypt.reinit_crypto()
cloud = Cloud(data["opts"])
try:
with salt.utils.context.func_globals_inject(

View file

@ -192,7 +192,7 @@ def get_dependencies():
"""
deps = {
"requests": HAS_REQUESTS,
"pycrypto or m2crypto": salt.crypt.HAS_M2 or salt.crypt.HAS_CRYPTO,
"cryptography": salt.crypt.HAS_CRYPTOGRAPHY,
}
return config.check_driver_dependencies(__virtualname__, deps)
@ -4929,7 +4929,7 @@ def get_password_data(
for item in data:
ret[next(iter(item.keys()))] = next(iter(item.values()))
if not salt.crypt.HAS_M2 and not salt.crypt.HAS_CRYPTO:
if not salt.crypt.HAS_CRYPTOGRAPHY:
if "key" in kwargs or "key_file" in kwargs:
log.warning("No crypto library is installed, can not decrypt password")
return ret

View file

@ -13,6 +13,7 @@ import types
import urllib.parse
from copy import deepcopy
import salt.crypt
import salt.defaults.exitcodes
import salt.exceptions
import salt.features
@ -1004,6 +1005,12 @@ VALID_OPTS = immutabletypes.freeze(
"fileserver_interval": int,
"request_channel_timeout": int,
"request_channel_tries": int,
# RSA encryption for minion
"encryption_algorithm": str,
# RSA signing for minion
"signing_algorithm": str,
# Master publish channel signing
"publish_signing_algorithm": str,
}
)
@ -1311,6 +1318,8 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze(
"reactor_niceness": None,
"fips_mode": False,
"features": {},
"encryption_algorithm": "OAEP-SHA1",
"signing_algorithm": "PKCS1v15-SHA1",
}
)
@ -1660,6 +1669,7 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze(
"cluster_peers": [],
"cluster_pki_dir": None,
"features": {},
"publish_signing_algorithm": "PKCS1v15-SHA1",
}
)
@ -3859,6 +3869,17 @@ def apply_minion_config(
_update_ssl_config(opts)
_update_discovery_config(opts)
if opts["encryption_algorithm"] not in salt.crypt.VALID_ENCRYPTION_ALGORITHMS:
raise salt.exceptions.SaltConfigurationError(
f"The encryption algorithm '{opts['encryption_algorithm']}' is not valid. "
f"Please specify one of {','.join(salt.crypt.VALID_ENCRYPTION_ALGORITHMS)}."
)
if opts["signing_algorithm"] not in salt.crypt.VALID_SIGNING_ALGORITHMS:
raise salt.exceptions.SaltConfigurationError(
f"The signging algorithm '{opts['signing_algorithm']}' is not valid. "
f"Please specify one of {','.join(salt.crypt.VALID_SIGNING_ALGORITHMS)}."
)
return opts
@ -4144,6 +4165,12 @@ def apply_master_config(overrides=None, defaults=None):
_update_ssl_config(opts)
_update_discovery_config(opts)
if opts["publish_signing_algorithm"] not in salt.crypt.VALID_SIGNING_ALGORITHMS:
raise salt.exceptions.SaltConfigurationError(
f"The publish signging algorithm '{opts['publish_signing_algorithm']}' is not valid. "
f"Please specify one of {','.join(salt.crypt.VALID_SIGNING_ALGORITHMS)}."
)
return opts
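
Three new options back these code paths: encryption_algorithm and signing_algorithm on the minion, publish_signing_algorithm on the master, all validated at config load time. A short summary plus an illustrative validation snippet (the option value below is an example, only the constants and exception come from the diff):

    # New options and their defaults (valid alternatives come from salt/crypt.py):
    #   encryption_algorithm       "OAEP-SHA1"      (also valid: "OAEP-SHA224")
    #   signing_algorithm          "PKCS1v15-SHA1"  (also valid: "PKCS1v15-SHA224")
    #   publish_signing_algorithm  "PKCS1v15-SHA1"  (also valid: "PKCS1v15-SHA224")

    import salt.crypt
    import salt.exceptions

    opts = {"encryption_algorithm": "OAEP-SHA224"}
    if opts["encryption_algorithm"] not in salt.crypt.VALID_ENCRYPTION_ALGORITHMS:
        raise salt.exceptions.SaltConfigurationError(
            f"Invalid encryption_algorithm {opts['encryption_algorithm']!r}"
        )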

View file

@ -43,45 +43,58 @@ from salt.exceptions import (
MasterExit,
SaltClientError,
SaltReqTimeoutError,
UnsupportedAlgorithm,
)
try:
from M2Crypto import BIO, EVP, RSA
import cryptography.exceptions
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
HAS_M2 = True
HAS_CRYPTOGRAPHY = True
except ImportError:
HAS_M2 = False
if not HAS_M2:
try:
from Cryptodome import Random
from Cryptodome.Cipher import AES, PKCS1_OAEP
from Cryptodome.Cipher import PKCS1_v1_5 as PKCS1_v1_5_CIPHER
from Cryptodome.Hash import SHA
from Cryptodome.PublicKey import RSA
from Cryptodome.Signature import PKCS1_v1_5
HAS_CRYPTO = True
except ImportError:
HAS_CRYPTO = False
if not HAS_M2 and not HAS_CRYPTO:
try:
# let this be imported, if possible
from Crypto import Random # nosec
from Crypto.Cipher import AES, PKCS1_OAEP # nosec
from Crypto.Cipher import PKCS1_v1_5 as PKCS1_v1_5_CIPHER # nosec
from Crypto.Hash import SHA # nosec
from Crypto.PublicKey import RSA # nosec
from Crypto.Signature import PKCS1_v1_5 # nosec
HAS_CRYPTO = True
except ImportError:
HAS_CRYPTO = False
HAS_CRYPTOGRAPHY = False
log = logging.getLogger(__name__)
OAEP = "OAEP"
PKCS1v15 = "PKCS1v15"
SHA1 = "SHA1"
SHA224 = "SHA224"
OAEP_SHA1 = f"{OAEP}-{SHA1}"
OAEP_SHA224 = f"{OAEP}-{SHA224}"
PKCS1v15_SHA1 = f"{PKCS1v15}-{SHA1}"
PKCS1v15_SHA224 = f"{PKCS1v15}-{SHA224}"
VALID_HASHES = (
SHA1,
SHA224,
)
VALID_PADDING_FOR_SIGNING = (PKCS1v15,)
VALID_PADDING_FOR_ENCRYPTION = (OAEP,)
VALID_ENCRYPTION_ALGORITHMS = (
OAEP_SHA1,
OAEP_SHA224,
)
VALID_SIGNING_ALGORITHMS = (
PKCS1v15_SHA1,
PKCS1v15_SHA224,
)
def fips_enabled():
if HAS_CRYPTOGRAPHY:
import cryptography.hazmat.backends.openssl.backend
return cryptography.hazmat.backends.openssl.backend._fips_enabled
def clean_key(key):
"""
@ -128,7 +141,7 @@ def dropfile(cachedir, user=None, master_id=""):
os.rename(dfn_next, dfn)
def gen_keys(keydir, keyname, keysize, user=None, passphrase=None):
def gen_keys(keydir, keyname, keysize, user=None, passphrase=None, e=65537):
"""
Generate a RSA public keypair for use with salt
@ -145,11 +158,8 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None):
priv = f"{base}.pem"
pub = f"{base}.pub"
if HAS_M2:
gen = RSA.gen_key(keysize, 65537, lambda: None)
else:
salt.utils.crypt.reinit_crypto()
gen = RSA.generate(bits=keysize, e=65537)
gen = rsa.generate_private_key(e, keysize)
if os.path.isfile(priv):
# Between first checking and the generation another process has made
# a key! Use the winner's key
@ -164,24 +174,30 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None):
)
with salt.utils.files.set_umask(0o277):
if HAS_M2:
# if passphrase is empty or None use no cipher
if not passphrase:
gen.save_pem(priv, cipher=None)
with salt.utils.files.fopen(priv, "wb+") as f:
if passphrase:
enc = serialization.BestAvailableEncryption(passphrase.encode())
_format = serialization.PrivateFormat.TraditionalOpenSSL
if fips_enabled():
_format = serialization.PrivateFormat.PKCS8
else:
gen.save_pem(
priv,
cipher="des_ede3_cbc",
callback=lambda x: salt.utils.stringutils.to_bytes(passphrase),
)
else:
with salt.utils.files.fopen(priv, "wb+") as f:
f.write(gen.exportKey("PEM", passphrase))
if HAS_M2:
gen.save_pub_key(pub)
else:
with salt.utils.files.fopen(pub, "wb+") as f:
f.write(gen.publickey().exportKey("PEM"))
enc = serialization.NoEncryption()
_format = serialization.PrivateFormat.TraditionalOpenSSL
pem = gen.private_bytes(
encoding=serialization.Encoding.PEM,
format=_format,
encryption_algorithm=enc,
)
f.write(pem)
pubkey = gen.public_key()
with salt.utils.files.fopen(pub, "wb+") as f:
pem = pubkey.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
f.write(pem)
os.chmod(priv, 0o400)
if user:
try:
@ -197,81 +213,120 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None):
return priv
class PrivateKey:
class BaseKey:
@staticmethod
def parse_padding_for_signing(algorithm):
if algorithm not in VALID_SIGNING_ALGORITHMS:
raise UnsupportedAlgorithm(f"Invalid signing algorithm: {algorithm}")
_pad, _hash = algorithm.split("-", 1)
if _pad not in VALID_PADDING_FOR_SIGNING:
raise UnsupportedAlgorithm(f"Invalid padding algorithm: {_pad}")
return getattr(padding, _pad)
@staticmethod
def parse_padding_for_encryption(algorithm):
if algorithm not in VALID_ENCRYPTION_ALGORITHMS:
raise UnsupportedAlgorithm(f"Invalid encryption algorithm: {algorithm}")
_pad, _hash = algorithm.split("-", 1)
if _pad not in VALID_PADDING_FOR_ENCRYPTION:
raise UnsupportedAlgorithm(f"Invalid padding algorithm: {_pad}")
return getattr(padding, _pad)
@staticmethod
def parse_hash(algorithm):
if "-" not in algorithm:
raise UnsupportedAlgorithm(f"Invalid encryption algorithm: {algorithm}")
_pad, _hash = algorithm.split("-", 1)
if _hash not in VALID_HASHES:
raise Exception("Invalid hashing algorithm")
return getattr(hashes, _hash)
class PrivateKey(BaseKey):
def __init__(self, path, passphrase=None):
if HAS_M2:
self.key = RSA.load_key(path, lambda x: bytes(passphrase))
else:
with salt.utils.files.fopen(path) as f:
self.key = RSA.importKey(f.read(), passphrase)
self.key = get_rsa_key(path, passphrase)
def encrypt(self, data):
if HAS_M2:
return self.key.private_encrypt(data, salt.utils.rsax931.RSA_X931_PADDING)
else:
return salt.utils.rsax931.RSAX931Signer(self.key.exportKey("PEM")).sign(
data
pem = self.key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
return salt.utils.rsax931.RSAX931Signer(pem).sign(data)
def sign(self, data, algorithm=PKCS1v15_SHA1):
_padding = self.parse_padding_for_signing(algorithm)
_hash = self.parse_hash(algorithm)
try:
return self.key.sign(
salt.utils.stringutils.to_bytes(data), _padding(), _hash()
)
except cryptography.exceptions.UnsupportedAlgorithm:
raise UnsupportedAlgorithm(f"Unsupported algorithm: {algorithm}")
def sign(self, data):
if HAS_M2:
md = EVP.MessageDigest("sha1")
md.update(salt.utils.stringutils.to_bytes(data))
digest = md.final()
return self.key.sign(digest)
else:
signer = PKCS1_v1_5.new(self.key)
return signer.sign(SHA.new(salt.utils.stringutils.to_bytes(data)))
def decrypt(self, data, algorithm=OAEP_SHA1):
_padding = self.parse_padding_for_encryption(algorithm)
_hash = self.parse_hash(algorithm)
try:
return self.key.decrypt(
data,
_padding(
mgf=padding.MGF1(algorithm=_hash()),
algorithm=_hash(),
label=None,
),
)
except cryptography.exceptions.UnsupportedAlgorithm:
raise UnsupportedAlgorithm(f"Unsupported algorithm: {algorithm}")
class PublicKey:
def __init__(self, path, _HAS_M2=HAS_M2):
self._HAS_M2 = _HAS_M2
if self._HAS_M2:
with salt.utils.files.fopen(path, "rb") as f:
data = f.read().replace(b"RSA ", b"")
bio = BIO.MemoryBuffer(data)
class PublicKey(BaseKey):
def __init__(self, path):
with salt.utils.files.fopen(path, "rb") as fp:
try:
self.key = RSA.load_pub_key_bio(bio)
except RSA.RSAError:
raise InvalidKeyError("Encountered bad RSA public key")
else:
with salt.utils.files.fopen(path) as f:
try:
self.key = RSA.importKey(f.read())
except (ValueError, IndexError, TypeError):
raise InvalidKeyError("Encountered bad RSA public key")
self.key = serialization.load_pem_public_key(fp.read())
except ValueError as exc:
raise InvalidKeyError("Invalid key")
def encrypt(self, data):
def encrypt(self, data, algorithm=OAEP_SHA1):
_padding = self.parse_padding_for_encryption(algorithm)
_hash = self.parse_hash(algorithm)
bdata = salt.utils.stringutils.to_bytes(data)
if self._HAS_M2:
return self.key.public_encrypt(bdata, salt.crypt.RSA.pkcs1_oaep_padding)
else:
return salt.crypt.PKCS1_OAEP.new(self.key).encrypt(bdata)
def verify(self, data, signature):
if self._HAS_M2:
md = EVP.MessageDigest("sha1")
md.update(salt.utils.stringutils.to_bytes(data))
digest = md.final()
try:
return self.key.verify(digest, signature)
except RSA.RSAError as exc:
log.debug("Signature verification failed: %s", exc.args[0])
return False
else:
verifier = PKCS1_v1_5.new(self.key)
return verifier.verify(
SHA.new(salt.utils.stringutils.to_bytes(data)), signature
try:
return self.key.encrypt(
bdata,
_padding(
mgf=padding.MGF1(algorithm=_hash()),
algorithm=_hash(),
label=None,
),
)
except cryptography.exceptions.UnsupportedAlgorithm:
raise UnsupportedAlgorithm(f"Unsupported algorithm: {algorithm}")
def verify(self, data, signature, algorithm=PKCS1v15_SHA1):
_padding = self.parse_padding_for_signing(algorithm)
_hash = self.parse_hash(algorithm)
try:
self.key.verify(
salt.utils.stringutils.to_bytes(signature),
salt.utils.stringutils.to_bytes(data),
_padding(),
_hash(),
)
except cryptography.exceptions.InvalidSignature:
return False
return True
def decrypt(self, data):
data = salt.utils.stringutils.to_bytes(data)
if HAS_M2:
return self.key.public_decrypt(data, salt.utils.rsax931.RSA_X931_PADDING)
else:
verifier = salt.utils.rsax931.RSAX931Verifier(self.key.exportKey("PEM"))
return verifier.verify(data)
pem = self.key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
verifier = salt.utils.rsax931.RSAX931Verifier(pem)
return verifier.verify(data)
@salt.utils.decorators.memoize
@ -284,12 +339,15 @@ def _get_key_with_evict(path, timestamp, passphrase):
modified then the params are different and the key is loaded from disk.
"""
log.debug("salt.crypt._get_key_with_evict: Loading private key")
if HAS_M2:
key = RSA.load_key(path, lambda x: bytes(passphrase))
if passphrase:
password = passphrase.encode()
else:
with salt.utils.files.fopen(path) as f:
key = RSA.importKey(f.read(), passphrase)
return key
password = None
with salt.utils.files.fopen(path, "rb") as f:
return serialization.load_pem_private_key(
f.read(),
password=password,
)
def get_rsa_key(path, passphrase):
@ -312,61 +370,29 @@ def get_rsa_pub_key(path):
Read a public key off the disk.
"""
log.debug("salt.crypt.get_rsa_pub_key: Loading public key")
if HAS_M2:
with salt.utils.files.fopen(path, "rb") as f:
data = f.read().replace(b"RSA ", b"")
bio = BIO.MemoryBuffer(data)
try:
key = RSA.load_pub_key_bio(bio)
except RSA.RSAError:
raise InvalidKeyError("Encountered bad RSA public key")
else:
with salt.utils.files.fopen(path) as f:
try:
key = RSA.importKey(f.read())
except (ValueError, IndexError, TypeError):
raise InvalidKeyError("Encountered bad RSA public key")
return key
try:
with salt.utils.files.fopen(path, "rb") as fp:
return serialization.load_pem_public_key(fp.read())
except ValueError:
raise InvalidKeyError("Encountered bad RSA public key")
except cryptography.exceptions.UnsupportedAlgorithm:
raise InvalidKeyError("Unsupported key algorithm")
def sign_message(privkey_path, message, passphrase=None):
def sign_message(privkey_path, message, passphrase=None, algorithm=PKCS1v15_SHA1):
"""
Use Crypto.Signature.PKCS1_v1_5 to sign a message. Returns the signature.
"""
key = get_rsa_key(privkey_path, passphrase)
log.debug("salt.crypt.sign_message: Signing message.")
if HAS_M2:
md = EVP.MessageDigest("sha1")
md.update(salt.utils.stringutils.to_bytes(message))
digest = md.final()
return key.sign(digest)
else:
signer = PKCS1_v1_5.new(key)
return signer.sign(SHA.new(salt.utils.stringutils.to_bytes(message)))
return PrivateKey(privkey_path, passphrase).sign(message, algorithm)
def verify_signature(pubkey_path, message, signature):
def verify_signature(pubkey_path, message, signature, algorithm=PKCS1v15_SHA1):
"""
Use Crypto.Signature.PKCS1_v1_5 to verify the signature on a message.
Returns True for valid signature.
"""
log.debug("salt.crypt.verify_signature: Loading public key")
pubkey = get_rsa_pub_key(pubkey_path)
log.debug("salt.crypt.verify_signature: Verifying signature")
if HAS_M2:
md = EVP.MessageDigest("sha1")
md.update(salt.utils.stringutils.to_bytes(message))
digest = md.final()
try:
return pubkey.verify(digest, signature)
except RSA.RSAError as exc:
log.debug("Signature verification failed: %s", exc.args[0])
return False
else:
verifier = PKCS1_v1_5.new(pubkey)
return verifier.verify(
SHA.new(salt.utils.stringutils.to_bytes(message)), signature
)
return PublicKey(pubkey_path).verify(message, signature, algorithm)
def gen_signature(priv_path, pub_path, sign_path, passphrase=None):
@ -400,49 +426,12 @@ def gen_signature(priv_path, pub_path, sign_path, passphrase=None):
return True
def private_encrypt(key, message):
"""
Generate an M2Crypto-compatible signature
:param Crypto.PublicKey.RSA._RSAobj key: The RSA key object
:param str message: The message to sign
:rtype: str
:return: The signature, or an empty string if the signature operation failed
"""
if HAS_M2:
return key.private_encrypt(message, salt.utils.rsax931.RSA_X931_PADDING)
else:
signer = salt.utils.rsax931.RSAX931Signer(key.exportKey("PEM"))
return signer.sign(message)
def public_decrypt(pub, message):
"""
Verify an M2Crypto-compatible signature
:param Crypto.PublicKey.RSA._RSAobj key: The RSA public key object
:param str message: The signed message to verify
:rtype: str
:return: The message (or digest) recovered from the signature, or an
empty string if the verification failed
"""
if HAS_M2:
return pub.public_decrypt(message, salt.utils.rsax931.RSA_X931_PADDING)
else:
verifier = salt.utils.rsax931.RSAX931Verifier(pub.exportKey("PEM"))
return verifier.verify(message)
def pwdata_decrypt(rsa_key, pwdata):
if HAS_M2:
key = RSA.load_key_string(salt.utils.stringutils.to_bytes(rsa_key, "ascii"))
password = key.private_decrypt(pwdata, RSA.pkcs1_padding)
else:
dsize = SHA.digest_size
sentinel = Random.new().read(15 + dsize)
key_obj = RSA.importKey(rsa_key)
key_obj = PKCS1_v1_5_CIPHER.new(key_obj)
password = key_obj.decrypt(pwdata, sentinel)
key = serialization.load_pem_private_key(rsa_key.encode(), password=None)
password = key.decrypt(
pwdata,
padding.PKCS1v15(),
)
return salt.utils.stringutils.to_unicode(password)
@ -580,18 +569,21 @@ class MasterKeys(dict):
self.opts.get("user"),
passphrase,
)
if HAS_M2:
key_error = RSA.RSAError
else:
key_error = ValueError
try:
key = get_rsa_key(path, passphrase)
except key_error as e:
key = PrivateKey(path, passphrase)
except ValueError as e:
message = f"Unable to read key: {path}; file may be corrupt"
except TypeError as e:
message = f"Unable to read key: {path}; passphrase may be incorrect"
log.error(message)
raise MasterExit(message)
log.debug("Loaded %s key: %s", name, path)
return key
except InvalidKeyError as e:
message = f"Unable to read key: {path}; key contains unsupported algorithm"
except cryptography.exceptions.UnsupportedAlgorithm as e:
message = f"Unable to read key: {path}; key contains unsupported algorithm"
else:
log.debug("Loaded %s key: %s", name, path)
return key
log.error(message)
raise MasterExit(message)
def get_pub_str(self, name="master"):
"""
@ -607,12 +599,14 @@ class MasterKeys(dict):
# if not os.path.isfile(path):
# raise RuntimeError(f"The key {path} does not exist.")
if not os.path.isfile(path):
key = self.__get_keys()
if HAS_M2:
key.save_pub_key(path)
else:
with salt.utils.files.fopen(path, "wb+") as wfh:
wfh.write(key.publickey().exportKey("PEM"))
pubkey = self.key.public_key()
with salt.utils.files.fopen(path, "wb+") as f:
f.write(
pubkey.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
)
with salt.utils.files.fopen(path) as rfh:
return clean_key(rfh.read())
@ -654,13 +648,6 @@ class MasterKeys(dict):
log.debug("Writing shared key %s", shared_pub)
shared_pub.write_bytes(master_pub.read_bytes())
def master_private_decrypt(self, data):
if HAS_M2:
return self.master_key.private_decrypt(data, RSA.pkcs1_oaep_padding)
else:
cipher = PKCS1_OAEP.new(self.master_key)
return cipher.decrypt(data)
class AsyncAuth:
"""
@ -729,10 +716,7 @@ class AsyncAuth:
self.mpub = "minion_master.pub"
if not os.path.isfile(self.pub_path):
self.get_keys()
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
salt.utils.crypt.reinit_crypto()
key = self.__key(self.opts)
# TODO: if we already have creds for this key, lets just re-use
if key in AsyncAuth.creds_map:
@ -864,6 +848,18 @@ class AsyncAuth:
"Authentication wait time is %s", acceptance_wait_time
)
continue
elif creds == "bad enc algo":
log.error(
"This minion is using a encryption algorithm that is "
"not supported by it's Master. Please check your minion configutation."
)
break
elif creds == "bad sig algo":
log.error(
"This minion is using a signing algorithm that is "
"not supported by it's Master. Please check your minion configutation."
)
break
break
if not isinstance(creds, dict) or "aes" not in creds:
if self.opts.get("detect_mode") is True:
@ -965,6 +961,13 @@ class AsyncAuth:
if not isinstance(payload, dict) or "load" not in payload:
log.error("Sign-in attempt failed: %s", payload)
return False
elif isinstance(payload["load"], dict) and "ret" in payload["load"]:
if payload["load"]["ret"] == "bad enc algo":
log.error("Sign-in attempt failed: %s", payload)
return "bad enc algo"
elif payload["load"]["ret"] == "bad sig algo":
log.error("Sign-in attempt failed: %s", payload)
return "bad sig algo"
clear_signed_data = payload["load"]
clear_signature = payload["sig"]
@ -991,7 +994,11 @@ class AsyncAuth:
master_pubkey_path = os.path.join(self.opts["pki_dir"], self.mpub)
if os.path.exists(master_pubkey_path) and not PublicKey(
master_pubkey_path
).verify(clear_signed_data, clear_signature):
).verify(
clear_signed_data,
clear_signature,
algorithm=self.opts["signing_algorithm"],
):
log.critical("The payload signature did not validate.")
raise SaltClientError("Invalid signature")
@ -1078,7 +1085,7 @@ class AsyncAuth:
self.opts["keysize"],
self.opts.get("user"),
)
key = get_rsa_key(self.rsa_path, None)
key = PrivateKey(self.rsa_path, None)
log.debug("Loaded minion key: %s", self.rsa_path)
return key
@ -1091,7 +1098,7 @@ class AsyncAuth:
:return: Encrypted token
:rtype: str
"""
return private_encrypt(self.get_keys(), clear_tok)
return self.get_keys().encrypt(clear_tok)
def minion_sign_in_payload(self):
"""
@ -1106,6 +1113,8 @@ class AsyncAuth:
payload["cmd"] = "_auth"
payload["id"] = self.opts["id"]
payload["nonce"] = uuid.uuid4().hex
payload["enc_algo"] = self.opts["encryption_algorithm"]
payload["sig_algo"] = self.opts["signing_algorithm"]
if "autosign_grains" in self.opts:
autosign_grains = {}
for grain in self.opts["autosign_grains"]:
@ -1113,16 +1122,14 @@ class AsyncAuth:
payload["autosign_grains"] = autosign_grains
try:
pubkey_path = os.path.join(self.opts["pki_dir"], self.mpub)
pub = get_rsa_pub_key(pubkey_path)
if HAS_M2:
payload["token"] = pub.public_encrypt(
self.token, RSA.pkcs1_oaep_padding
)
else:
cipher = PKCS1_OAEP.new(pub)
payload["token"] = cipher.encrypt(self.token)
except Exception: # pylint: disable=broad-except
pass
pub = PublicKey(pubkey_path)
payload["token"] = pub.encrypt(
self.token, self.opts["encryption_algorithm"]
)
except FileNotFoundError:
log.debug("Master public key not found")
except Exception as exc: # pylint: disable=broad-except
log.debug("Exception while encrypting token %s", exc)
with salt.utils.files.fopen(self.pub_path) as f:
payload["pub"] = clean_key(f.read())
return payload
@ -1154,25 +1161,19 @@ class AsyncAuth:
log.warning("Auth Called: %s", "".join(traceback.format_stack()))
else:
log.debug("Decrypting the current master AES key")
key = self.get_keys()
if HAS_M2:
key_str = key.private_decrypt(payload["aes"], RSA.pkcs1_oaep_padding)
else:
cipher = PKCS1_OAEP.new(key)
key_str = cipher.decrypt(payload["aes"])
key_str = key.decrypt(payload["aes"], self.opts["encryption_algorithm"])
if "sig" in payload:
m_path = os.path.join(self.opts["pki_dir"], self.mpub)
if os.path.exists(m_path):
try:
mkey = get_rsa_pub_key(m_path)
mkey = PublicKey(m_path)
except Exception: # pylint: disable=broad-except
return "", ""
digest = hashlib.sha256(key_str).hexdigest()
digest = salt.utils.stringutils.to_bytes(digest)
if HAS_M2:
m_digest = public_decrypt(mkey, payload["sig"])
else:
m_digest = public_decrypt(mkey.publickey(), payload["sig"])
m_digest = mkey.decrypt(payload["sig"])
if m_digest != digest:
return "", ""
else:
@ -1184,12 +1185,7 @@ class AsyncAuth:
return key_str.split("_|-")
else:
if "token" in payload:
if HAS_M2:
token = key.private_decrypt(
payload["token"], RSA.pkcs1_oaep_padding
)
else:
token = cipher.decrypt(payload["token"])
token = key.decrypt(payload["token"], self.opts["encryption_algorithm"])
return key_str, token
elif not master_pub:
return key_str, ""
@ -1209,7 +1205,12 @@ class AsyncAuth:
)
if os.path.isfile(path):
res = verify_signature(path, message, binascii.a2b_base64(sig))
res = verify_signature(
path,
message,
binascii.a2b_base64(sig),
algorithm=self.opts["signing_algorithm"],
)
else:
log.error(
"Verification public key %s does not exist. You need to "
@ -1657,15 +1658,10 @@ class Crypticle:
pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE
data = data + salt.utils.stringutils.to_bytes(pad * chr(pad))
iv_bytes = os.urandom(self.AES_BLOCK_SIZE)
if HAS_M2:
cypher = EVP.Cipher(
alg="aes_192_cbc", key=aes_key, iv=iv_bytes, op=1, padding=False
)
encr = cypher.update(data)
encr += cypher.final()
else:
cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
encr = cypher.encrypt(data)
cipher = Cipher(algorithms.AES(aes_key), modes.CBC(iv_bytes))
encryptor = cipher.encryptor()
encr = encryptor.update(data)
encr += encryptor.finalize()
data = iv_bytes + encr
sig = hmac.new(hmac_key, data, hashlib.sha256).digest()
return data + sig
@ -1684,7 +1680,6 @@ class Crypticle:
log.debug("Failed to authenticate message")
raise AuthenticationError("message authentication failed")
result = 0
for zipped_x, zipped_y in zip(mac_bytes, sig):
result |= zipped_x ^ zipped_y
if result != 0:
@ -1692,15 +1687,9 @@ class Crypticle:
raise AuthenticationError("message authentication failed")
iv_bytes = data[: self.AES_BLOCK_SIZE]
data = data[self.AES_BLOCK_SIZE :]
if HAS_M2:
cypher = EVP.Cipher(
alg="aes_192_cbc", key=aes_key, iv=iv_bytes, op=0, padding=False
)
encr = cypher.update(data)
data = encr + cypher.final()
else:
cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
data = cypher.decrypt(data)
cipher = Cipher(algorithms.AES(aes_key), modes.CBC(iv_bytes))
decryptor = cipher.decryptor()
data = decryptor.update(data) + decryptor.finalize()
return data[: -data[-1]]
def dumps(self, obj, nonce=None):
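# A minimal stand-alone sketch of the AES-CBC + HMAC-SHA256 scheme the
# Crypticle hunks above move onto the `cryptography` Cipher API. Key handling
# and framing here are illustrative (aes_key must be 16, 24 or 32 bytes); this
# is not Salt's wire format.
import hashlib
import hmac
import os

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

def cbc_encrypt(aes_key, hmac_key, data):
    # Pad to the AES block size, CBC-encrypt, then append an HMAC-SHA256 over
    # IV + ciphertext so the receiver can authenticate before decrypting.
    pad = 16 - len(data) % 16
    data += bytes([pad]) * pad
    iv = os.urandom(16)
    enc = Cipher(algorithms.AES(aes_key), modes.CBC(iv)).encryptor()
    blob = iv + enc.update(data) + enc.finalize()
    return blob + hmac.new(hmac_key, blob, hashlib.sha256).digest()

def cbc_decrypt(aes_key, hmac_key, blob):
    # Verify the trailing HMAC in constant time, CBC-decrypt, strip padding.
    blob, sig = blob[:-32], blob[-32:]
    if not hmac.compare_digest(hmac.new(hmac_key, blob, hashlib.sha256).digest(), sig):
        raise ValueError("message authentication failed")
    iv, data = blob[:16], blob[16:]
    dec = Cipher(algorithms.AES(aes_key), modes.CBC(iv)).decryptor()
    out = dec.update(data) + dec.finalize()
    return out[: -out[-1]]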
@ -362,6 +362,12 @@ class AuthorizationError(SaltException):
"""
class UnsupportedAlgorithm(SaltException):
"""
Thrown when a requested encryption or signing algorithm is unsupported.
"""
class SaltDaemonNotRunning(SaltException):
"""
Thrown when a master/minion/syndic is not running but is needed to
@ -40,6 +40,7 @@ import salt.utils.pkg.rpm
import salt.utils.platform
import salt.utils.stringutils
from salt.utils.network import _clear_interfaces, _get_interfaces
from salt.utils.platform import get_machine_identifier as _get_machine_identifier
from salt.utils.platform import linux_distribution as _linux_distribution
try:
@ -3049,13 +3050,7 @@ def get_machine_id():
if platform.system() == "AIX":
return _aix_get_machine_id()
locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"]
existing_locations = [loc for loc in locations if os.path.exists(loc)]
if not existing_locations:
return {}
else:
with salt.utils.files.fopen(existing_locations[0]) as machineid:
return {"machine_id": machineid.read().strip()}
return _get_machine_identifier()
def cwd():
@ -1281,7 +1281,10 @@ class LazyLoader(salt.utils.lazy.LazyDict):
self.parent_loader = current_loader
token = salt.loader.context.loader_ctxvar.set(self)
try:
return _func_or_method(*args, **kwargs)
ret = _func_or_method(*args, **kwargs)
if isinstance(ret, salt.loader.context.NamedLoaderContext):
ret = ret.value()
return ret
finally:
self.parent_loader = None
salt.loader.context.loader_ctxvar.reset(token)
@ -38,7 +38,6 @@ import salt.serializers.msgpack
import salt.state
import salt.utils.args
import salt.utils.atomicfile
import salt.utils.crypt
import salt.utils.ctx
import salt.utils.event
import salt.utils.files
@ -1313,7 +1312,6 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
)
self.clear_funcs.connect()
self.aes_funcs = AESFuncs(self.opts)
salt.utils.crypt.reinit_crypto()
self.__bind()
@ -1435,7 +1433,7 @@ class AESFuncs(TransportMethods):
return False
pub_path = os.path.join(self.pki_dir, "minions", id_)
try:
pub = salt.crypt.get_rsa_pub_key(pub_path)
pub = salt.crypt.PublicKey(pub_path)
except OSError:
log.warning(
"Salt minion claiming to be %s attempted to communicate with "
@ -1446,7 +1444,7 @@ class AESFuncs(TransportMethods):
except (ValueError, IndexError, TypeError) as err:
log.error('Unable to load public key "%s": %s', pub_path, err)
try:
if salt.crypt.public_decrypt(pub, token) == b"salt":
if pub.decrypt(token) == b"salt":
return True
except ValueError as err:
log.error("Unable to decrypt token: %s", err)
@ -40,7 +40,6 @@ import salt.syspaths
import salt.transport
import salt.utils.args
import salt.utils.context
import salt.utils.crypt
import salt.utils.ctx
import salt.utils.data
import salt.utils.dictdiffer
@ -1808,7 +1807,6 @@ class Minion(MinionBase):
name=name,
args=(instance, self.opts, data, self.connected, creds_map),
)
process.register_after_fork_method(salt.utils.crypt.reinit_crypto)
else:
process = threading.Thread(
target=self._target,
@ -17,7 +17,6 @@ import os
import subprocess
import sys
import salt.utils.crypt
import salt.utils.files
import salt.utils.fsutils
import salt.utils.path
@ -579,10 +578,9 @@ if __name__ == "__main__":
# Double-fork stuff
try:
if os.fork() > 0:
salt.utils.crypt.reinit_crypto()
sys.exit(0)
else:
salt.utils.crypt.reinit_crypto()
pass
except OSError as ex:
sys.exit(1)
@ -592,7 +590,6 @@ if __name__ == "__main__":
try:
pid = os.fork()
if pid > 0:
salt.utils.crypt.reinit_crypto()
with salt.utils.files.fopen(
os.path.join(pidfile, EnvLoader.PID_FILE), "w"
) as fp_:
@ -601,5 +598,4 @@ if __name__ == "__main__":
except OSError as ex:
sys.exit(1)
salt.utils.crypt.reinit_crypto()
main(dbfile, pidfile, mode)
@ -976,8 +976,9 @@ class State:
self.state_con["loader_cache"][agg_fun] = True
try:
low["__agg__"] = True
low = self._aggregate_requisites(low, chunks)
# Aggregate the states *before* aggregating requisites, otherwise there will never be requisites to aggregate
low = self.states[agg_fun](low, chunks, running)
low = self._aggregate_requisites(low, chunks)
except TypeError:
log.error("Failed to execute aggregate for state %s", low["state"])
else:
@ -1606,10 +1606,12 @@ def _build_cert(
ca_server=None, signing_policy=None, signing_private_key=None, **kwargs
):
final_kwargs = copy.deepcopy(kwargs)
final_kwargs["signing_private_key"] = signing_private_key
x509util.merge_signing_policy(
__salt__["x509.get_signing_policy"](signing_policy, ca_server=ca_server),
final_kwargs,
)
signing_private_key = final_kwargs.pop("signing_private_key")
builder, _, private_key_loaded, signing_cert = x509util.build_crt(
signing_private_key,
@ -956,7 +956,10 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
await publish_payload(package)
except Exception as exc: # pylint: disable=broad-except
log.error(
"Exception in publisher %s %s", self.pull_uri, exc, exc_info=True
"Exception in publisher %s %s",
self.pull_uri,
exc,
exc_info_on_loglevel=logging.DEBUG,
)
async def publish_payload(self, payload, topic_list=None):
@ -12,35 +12,6 @@ from salt.exceptions import SaltInvocationError
log = logging.getLogger(__name__)
try:
import M2Crypto # pylint: disable=unused-import
Random = None
HAS_M2CRYPTO = True
except ImportError:
HAS_M2CRYPTO = False
if not HAS_M2CRYPTO:
try:
from Cryptodome import Random
HAS_CRYPTODOME = True
except ImportError:
HAS_CRYPTODOME = False
else:
HAS_CRYPTODOME = False
if not HAS_M2CRYPTO and not HAS_CRYPTODOME:
try:
from Crypto import Random # nosec
HAS_CRYPTO = True
except ImportError:
HAS_CRYPTO = False
else:
HAS_CRYPTO = False
def decrypt(
data, rend, translate_newlines=False, renderers=None, opts=None, valid_rend=None
):
@ -117,20 +88,6 @@ def decrypt(
return rend_func(data, translate_newlines=translate_newlines)
def reinit_crypto():
"""
When a fork arises, pycrypto needs to reinit
From its doc::
Caveat: For the random number generator to work correctly,
you must call Random.atfork() in both the parent and
child processes after using os.fork()
"""
if HAS_CRYPTODOME or HAS_CRYPTO:
Random.atfork()
def pem_finger(path=None, key=None, sum_type="sha256"):
"""
Pass in either a raw pem string, or the path on disk to the location of a
@ -381,7 +381,8 @@ def fopen(*args, **kwargs):
# Workaround callers with bad buffering setting for binary files
if kwargs.get("buffering") == 1 and "b" in kwargs.get("mode", ""):
log.debug(
"Line buffering (buffering=1) isn't supported in binary mode, the default buffer size will be used"
"Line buffering (buffering=1) isn't supported in binary mode, "
"the default buffer size will be used"
)
kwargs["buffering"] = io.DEFAULT_BUFFER_SIZE
@ -13,6 +13,7 @@ import io
import logging
import multiprocessing
import os
import pathlib
import shlex
import shutil
import stat
@ -33,6 +34,7 @@ import salt.utils.hashutils
import salt.utils.itertools
import salt.utils.path
import salt.utils.platform
import salt.utils.process
import salt.utils.stringutils
import salt.utils.url
import salt.utils.user
@ -42,7 +44,7 @@ from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS
from salt.exceptions import FileserverConfigError, GitLockError, get_error_message
from salt.utils.event import tagify
from salt.utils.odict import OrderedDict
from salt.utils.process import os_is_running as pid_exists
from salt.utils.platform import get_machine_identifier as _get_machine_identifier
from salt.utils.versions import Version
VALID_REF_TYPES = _DEFAULT_MASTER_OPTS["gitfs_ref_types"]
@ -82,6 +84,14 @@ _INVALID_REPO = (
log = logging.getLogger(__name__)
HAS_PSUTIL = False
try:
import psutil
HAS_PSUTIL = True
except ImportError:
pass
# pylint: disable=import-error
try:
if (
@ -249,6 +259,11 @@ class GitProvider:
def _val_cb(x, y):
return str(y)
# get machine_identifier
self.mach_id = _get_machine_identifier().get(
"machine_id", "no_machine_id_available"
)
self.global_saltenv = salt.utils.data.repack_dictlist(
self.opts.get(f"{self.role}_saltenv", []),
strict=True,
@ -511,6 +526,17 @@ class GitProvider:
os.makedirs(self._salt_working_dir)
self.fetch_request_check()
if HAS_PSUTIL:
cur_pid = os.getpid()
process = psutil.Process(cur_pid)
dgm_process_dir = dir(process)
cache_dir = self.opts.get("cachedir", None)
gitfs_active = self.opts.get("gitfs_remotes", None)
if cache_dir and gitfs_active:
salt.utils.process.register_cleanup_finalize_function(
gitfs_finalize_cleanup, cache_dir
)
def get_cache_basehash(self):
return self._cache_basehash
@ -752,7 +778,12 @@ class GitProvider:
except OSError as exc:
if exc.errno == errno.ENOENT:
# No lock file present
pass
msg = (
f"Attempt to remove lock {self.url} for file ({lock_file}) "
f"which does not exist, exception : {exc} "
)
log.debug(msg)
elif exc.errno == errno.EISDIR:
# Somehow this path is a directory. Should never happen
# unless some wiseguy manually creates a directory at this
@ -764,8 +795,9 @@ class GitProvider:
else:
_add_error(failed, exc)
else:
msg = "Removed {} lock for {} remote '{}'".format(
lock_type, self.role, self.id
msg = (
f"Removed {lock_type} lock for {self.role} remote '{self.id}' "
f"on machine_id '{self.mach_id}'"
)
log.debug(msg)
success.append(msg)
@ -904,7 +936,19 @@ class GitProvider:
self._get_lock_file(lock_type="update"),
self.role,
)
else:
log.warning(
"Update lock file generated an unexpected exception for %s remote '%s', "
"The lock file %s for %s type=update operation, exception: %s .",
self.role,
self.id,
self._get_lock_file(lock_type="update"),
self.role,
str(exc),
)
return False
except NotImplementedError as exc:
log.warning("fetch got NotImplementedError exception %s", exc)
def _lock(self, lock_type="update", failhard=False):
"""
@ -930,7 +974,11 @@ class GitProvider:
)
with os.fdopen(fh_, "wb"):
# Write the lock file and close the filehandle
os.write(fh_, salt.utils.stringutils.to_bytes(str(os.getpid())))
os.write(
fh_,
salt.utils.stringutils.to_bytes(f"{os.getpid()}\n{self.mach_id}\n"),
)
except OSError as exc:
if exc.errno == errno.EEXIST:
with salt.utils.files.fopen(self._get_lock_file(lock_type), "r") as fd_:
@ -942,40 +990,66 @@ class GitProvider:
# Lock file is empty, set pid to 0 so it evaluates as
# False.
pid = 0
try:
mach_id = salt.utils.stringutils.to_unicode(
fd_.readline()
).rstrip()
except ValueError as exc:
# Lock file is empty, set machine id to 0 so it evaluates as
# False.
mach_id = 0
global_lock_key = self.role + "_global_lock"
lock_file = self._get_lock_file(lock_type=lock_type)
if self.opts[global_lock_key]:
msg = (
"{} is enabled and {} lockfile {} is present for "
"{} remote '{}'.".format(
global_lock_key,
lock_type,
lock_file,
self.role,
self.id,
)
f"{global_lock_key} is enabled and {lock_type} lockfile {lock_file} "
f"is present for {self.role} remote '{self.id}' on machine_id "
f"{self.mach_id} with pid '{pid}'."
)
if pid:
msg += f" Process {pid} obtained the lock"
if not pid_exists(pid):
msg += (
" but this process is not running. The "
"update may have been interrupted. If "
"using multi-master with shared gitfs "
"cache, the lock may have been obtained "
"by another master."
)
if self.mach_id or mach_id:
msg += f" for machine_id {mach_id}, current machine_id {self.mach_id}"
if not salt.utils.process.os_is_running(pid):
if self.mach_id != mach_id:
msg += (
" but this process is not running. The "
"update may have been interrupted. If "
"using multi-master with shared gitfs "
"cache, the lock may have been obtained "
f"by another master, with machine_id {mach_id}"
)
else:
msg += (
" but this process is not running. The "
"update may have been interrupted. "
" Given this process is for the same machine"
" the lock will be reallocated to new process "
)
log.warning(msg)
success, fail = self._clear_lock()
if success:
return self.__lock(
lock_type="update", failhard=failhard
)
elif failhard:
raise
return
log.warning(msg)
if failhard:
raise
return
elif pid and pid_exists(pid):
elif pid and salt.utils.process.os_is_running(pid):
log.warning(
"Process %d has a %s %s lock (%s)",
"Process %d has a %s %s lock (%s) on machine_id %s",
pid,
self.role,
lock_type,
lock_file,
self.mach_id,
)
if failhard:
raise
@ -983,12 +1057,13 @@ class GitProvider:
else:
if pid:
log.warning(
"Process %d has a %s %s lock (%s), but this "
"Process %d has a %s %s lock (%s) on machine_id %s, but this "
"process is not running. Cleaning up lock file.",
pid,
self.role,
lock_type,
lock_file,
self.mach_id,
)
success, fail = self._clear_lock()
if success:
@ -997,12 +1072,14 @@ class GitProvider:
raise
return
else:
msg = "Unable to set {} lock for {} ({}): {} ".format(
lock_type, self.id, self._get_lock_file(lock_type), exc
msg = (
f"Unable to set {lock_type} lock for {self.id} "
f"({self._get_lock_file(lock_type)}) on machine_id {self.mach_id}: {exc}"
)
log.error(msg, exc_info=True)
raise GitLockError(exc.errno, msg)
msg = f"Set {lock_type} lock for {self.role} remote '{self.id}'"
msg = f"Set {lock_type} lock for {self.role} remote '{self.id}' on machine_id '{self.mach_id}'"
log.debug(msg)
return msg
@ -1019,6 +1096,15 @@ class GitProvider:
try:
result = self._lock(lock_type="update")
except GitLockError as exc:
log.warning(
"Update lock file generated an unexpected exception for %s remote '%s', "
"The lock file %s for %s type=update operation, exception: %s .",
self.role,
self.id,
self._get_lock_file(lock_type="update"),
self.role,
str(exc),
)
failed.append(exc.strerror)
else:
if result is not None:
@ -1028,7 +1114,8 @@ class GitProvider:
@contextlib.contextmanager
def gen_lock(self, lock_type="update", timeout=0, poll_interval=0.5):
"""
Set and automatically clear a lock.
Intended to be used as a context manager, for example: with self.gen_lock()
should be called from a context, for example: with self.gen_lock()
"""
if not isinstance(lock_type, str):
raise GitLockError(errno.EINVAL, f"Invalid lock_type '{lock_type}'")
@ -1049,17 +1136,23 @@ class GitProvider:
if poll_interval > timeout:
poll_interval = timeout
lock_set = False
lock_set1 = False
lock_set2 = False
try:
time_start = time.time()
while True:
try:
self._lock(lock_type=lock_type, failhard=True)
lock_set = True
yield
lock_set1 = True
# the contextmanager docs state we need to yield a single value; lock_set1 will do
yield lock_set1
# Break out of this loop once we've yielded the lock, to
# avoid continued attempts to iterate and establish the lock,
# just ensuring lock_set2 is true (belts and braces)
lock_set2 = True
break
except (OSError, GitLockError) as exc:
if not timeout or time.time() - time_start > timeout:
raise GitLockError(exc.errno, exc.strerror)
@ -1075,7 +1168,13 @@ class GitProvider:
time.sleep(poll_interval)
continue
finally:
if lock_set:
if lock_set1 or lock_set2:
msg = (
f"Attempting to remove '{lock_type}' lock for "
f"'{self.role}' remote '{self.id}' due to lock_set1 "
f"'{lock_set1}' or lock_set2 '{lock_set2}'"
)
log.debug(msg)
self.clear_lock(lock_type=lock_type)
def init_remote(self):
@ -1365,9 +1464,7 @@ class GitPython(GitProvider):
# function.
raise GitLockError(
exc.errno,
"Checkout lock exists for {} remote '{}'".format(
self.role, self.id
),
f"Checkout lock exists for {self.role} remote '{self.id}'",
)
else:
log.error(
@ -1716,9 +1813,7 @@ class Pygit2(GitProvider):
# function.
raise GitLockError(
exc.errno,
"Checkout lock exists for {} remote '{}'".format(
self.role, self.id
),
f"Checkout lock exists for {self.role} remote '{self.id}'",
)
else:
log.error(
@ -2233,10 +2328,8 @@ class Pygit2(GitProvider):
if not self.ssl_verify:
warnings.warn(
"pygit2 does not support disabling the SSL certificate "
"check in versions prior to 0.23.2 (installed: {}). "
"Fetches for self-signed certificates will fail.".format(
PYGIT2_VERSION
)
f"check in versions prior to 0.23.2 (installed: {PYGIT2_VERSION}). "
"Fetches for self-signed certificates will fail."
)
def verify_auth(self):
@ -2489,11 +2582,12 @@ class GitBase:
if self.provider in AUTH_PROVIDERS:
override_params += AUTH_PARAMS
elif global_auth_params:
msg_auth_providers = "{}".format(", ".join(AUTH_PROVIDERS))
msg = (
"{0} authentication was configured, but the '{1}' "
"{0}_provider does not support authentication. The "
"providers for which authentication is supported in {0} "
"are: {2}.".format(self.role, self.provider, ", ".join(AUTH_PROVIDERS))
f"{self.role} authentication was configured, but the '{self.provider}' "
f"{self.role}_provider does not support authentication. The "
f"providers for which authentication is supported in {self.role} "
f"are: {msg_auth_providers}."
)
if self.role == "gitfs":
msg += (
@ -2665,6 +2759,7 @@ class GitBase:
success, failed = repo.clear_lock(lock_type=lock_type)
cleared.extend(success)
errors.extend(failed)
return cleared, errors
def fetch_remotes(self, remotes=None):
@ -2876,15 +2971,13 @@ class GitBase:
errors = []
if GITPYTHON_VERSION < GITPYTHON_MINVER:
errors.append(
"{} is configured, but the GitPython version is earlier than "
"{}. Version {} detected.".format(
self.role, GITPYTHON_MINVER, GITPYTHON_VERSION
)
f"{self.role} is configured, but the GitPython version is earlier than "
f"{GITPYTHON_MINVER}. Version {GITPYTHON_VERSION} detected."
)
if not salt.utils.path.which("git"):
errors.append(
"The git command line utility is required when using the "
"'gitpython' {}_provider.".format(self.role)
f"'gitpython' {self.role}_provider."
)
if errors:
@ -2923,24 +3016,20 @@ class GitBase:
errors = []
if PYGIT2_VERSION < PYGIT2_MINVER:
errors.append(
"{} is configured, but the pygit2 version is earlier than "
"{}. Version {} detected.".format(
self.role, PYGIT2_MINVER, PYGIT2_VERSION
)
f"{self.role} is configured, but the pygit2 version is earlier than "
f"{PYGIT2_MINVER}. Version {PYGIT2_VERSION} detected."
)
if LIBGIT2_VERSION < LIBGIT2_MINVER:
errors.append(
"{} is configured, but the libgit2 version is earlier than "
"{}. Version {} detected.".format(
self.role, LIBGIT2_MINVER, LIBGIT2_VERSION
)
f"{self.role} is configured, but the libgit2 version is earlier than "
f"{LIBGIT2_MINVER}. Version {LIBGIT2_VERSION} detected."
)
if not getattr(pygit2, "GIT_FETCH_PRUNE", False) and not salt.utils.path.which(
"git"
):
errors.append(
"The git command line utility is required when using the "
"'pygit2' {}_provider.".format(self.role)
f"'pygit2' {self.role}_provider."
)
if errors:
@ -3253,10 +3342,11 @@ class GitFS(GitBase):
ret = {"hash_type": self.opts["hash_type"]}
relpath = fnd["rel"]
path = fnd["path"]
lc_hash_type = self.opts["hash_type"]
hashdest = salt.utils.path.join(
self.hash_cachedir,
load["saltenv"],
"{}.hash.{}".format(relpath, self.opts["hash_type"]),
f"{relpath}.hash.{lc_hash_type}",
)
try:
with salt.utils.files.fopen(hashdest, "rb") as fp_:
@ -3291,13 +3381,14 @@ class GitFS(GitBase):
except OSError:
log.error("Unable to make cachedir %s", self.file_list_cachedir)
return []
lc_path_adj = load["saltenv"].replace(os.path.sep, "_|-")
list_cache = salt.utils.path.join(
self.file_list_cachedir,
"{}.p".format(load["saltenv"].replace(os.path.sep, "_|-")),
f"{lc_path_adj}.p",
)
w_lock = salt.utils.path.join(
self.file_list_cachedir,
".{}.w".format(load["saltenv"].replace(os.path.sep, "_|-")),
f".{lc_path_adj}.w",
)
cache_match, refresh_cache, save_cache = salt.fileserver.check_file_list_cache(
self.opts, form, list_cache, w_lock
@ -3561,3 +3652,100 @@ class WinRepo(GitBase):
cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail)
if cachedir is not None:
self.winrepo_dirs[repo.id] = cachedir
def gitfs_finalize_cleanup(cache_dir):
"""
Clean up finalize processes that used gitfs
"""
cur_pid = os.getpid()
mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available")
# need to clean up any resources left around like lock files if using gitfs
# example: lockfile
# /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk
# check for gitfs file locks to ensure no resource leaks
# last chance to clean up any missed unlock droppings
cache_dir = pathlib.Path(cache_dir + "/gitfs/work")
if cache_dir.exists() and cache_dir.is_dir():
file_list = list(cache_dir.glob("**/*.lk"))
file_del_list = []
file_pid = 0
file_mach_id = 0
try:
for file_name in file_list:
with salt.utils.files.fopen(file_name, "r") as fd_:
try:
file_pid = int(
salt.utils.stringutils.to_unicode(fd_.readline()).rstrip()
)
except ValueError:
# Lock file is empty, set pid to 0 so it evaluates as False.
file_pid = 0
try:
file_mach_id = salt.utils.stringutils.to_unicode(
fd_.readline()
).rstrip()
except ValueError:
# Lock file is empty, set mach_id to 0 so it evaluates False.
file_mach_id = 0
if cur_pid == file_pid:
if mach_id != file_mach_id:
if not file_mach_id:
msg = (
f"gitfs lock file for pid '{file_pid}' does not "
"contain a machine id, deleting lock file which may "
"affect if using multi-master with shared gitfs cache, "
"the lock may have been obtained by another master "
"recommend updating Salt version on other masters to a "
"version which insert machine identification in lock a file."
)
log.debug(msg)
file_del_list.append((file_name, file_pid, file_mach_id))
else:
file_del_list.append((file_name, file_pid, file_mach_id))
except FileNotFoundError:
log.debug("gitfs lock file: %s not found", file_name)
for file_name, file_pid, file_mach_id in file_del_list:
try:
os.remove(file_name)
except OSError as exc:
if exc.errno == errno.ENOENT:
# No lock file present
msg = (
"SIGTERM clean up of resources attempted to remove lock "
f"file {file_name}, pid '{file_pid}', machine identifier "
f"'{mach_id}' but it did not exist, exception : {exc} "
)
log.debug(msg)
elif exc.errno == errno.EISDIR:
# Somehow this path is a directory. Should never happen
# unless some wiseguy manually creates a directory at this
# path, but just in case, handle it.
try:
shutil.rmtree(file_name)
except OSError as exc:
msg = (
f"SIGTERM clean up of resources, lock file '{file_name}'"
f", pid '{file_pid}', machine identifier '{file_mach_id}'"
f"was a directory, removed directory, exception : '{exc}'"
)
log.debug(msg)
else:
msg = (
"SIGTERM clean up of resources, unable to remove lock file "
f"'{file_name}', pid '{file_pid}', machine identifier "
f"'{file_mach_id}', exception : '{exc}'"
)
log.debug(msg)
else:
msg = (
"SIGTERM clean up of resources, removed lock file "
f"'{file_name}', pid '{file_pid}', machine identifier "
f"'{file_mach_id}'"
)
log.debug(msg)
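# Illustrative round-trip of the two-line lock format (pid, then machine id)
# that _lock() writes and gitfs_finalize_cleanup() parses above; the path and
# machine id below are made up for the example.
import os

lock_path = "/tmp/example-update.lk"
with open(lock_path, "w") as fh:
    fh.write(f"{os.getpid()}\nexample-machine-id\n")

with open(lock_path) as fh:
    pid = int(fh.readline().strip() or 0)
    mach_id = fh.readline().strip()
print(pid, mach_id)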
@ -22,7 +22,6 @@ from jinja2.environment import TemplateModule
from jinja2.exceptions import TemplateRuntimeError
from jinja2.ext import Extension
import salt.fileclient
import salt.utils.data
import salt.utils.files
import salt.utils.json
@ -93,6 +92,8 @@ class SaltCacheLoader(BaseLoader):
or not hasattr(self._file_client, "opts")
or self._file_client.opts["file_roots"] != self.opts["file_roots"]
):
import salt.fileclient
self._file_client = salt.fileclient.get_file_client(
self.opts, self.pillar_rend
)
@ -239,3 +239,22 @@ def spawning_platform():
Salt, however, will force macOS to spawning by default on all python versions
"""
return multiprocessing.get_start_method(allow_none=False) == "spawn"
def get_machine_identifier():
"""
Provide the machine-id for machine/virtualization combination
"""
# pylint: disable=resource-leakage
# Provides:
# machine-id
locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"]
existing_locations = [loc for loc in locations if os.path.exists(loc)]
if not existing_locations:
return {}
else:
# cannot use salt.utils.files.fopen due to circular dependency
with open(
existing_locations[0], encoding=__salt_system_encoding__
) as machineid:
return {"machine_id": machineid.read().strip()}
@ -47,6 +47,9 @@ try:
except ImportError:
HAS_SETPROCTITLE = False
# Process finalization function list
_INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST = []
def appendproctitle(name):
"""
@ -70,7 +73,6 @@ def daemonize(redirect_out=True):
pid = os.fork()
if pid > 0:
# exit first parent
salt.utils.crypt.reinit_crypto()
os._exit(salt.defaults.exitcodes.EX_OK)
except OSError as exc:
log.error("fork #1 failed: %s (%s)", exc.errno, exc)
@ -86,14 +88,11 @@ def daemonize(redirect_out=True):
try:
pid = os.fork()
if pid > 0:
salt.utils.crypt.reinit_crypto()
sys.exit(salt.defaults.exitcodes.EX_OK)
except OSError as exc:
log.error("fork #2 failed: %s (%s)", exc.errno, exc)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
salt.utils.crypt.reinit_crypto()
# A normal daemonization redirects the process output to /dev/null.
# Unfortunately when a python multiprocess is called the output is
# not cleanly redirected and the parent process dies when the
@ -208,7 +207,7 @@ def get_process_info(pid=None):
# pid_exists can have false positives
# for example Windows reserves PID 5 in a hack way
# another reasons is the the process requires kernel permissions
# another reason is that the process requires kernel permissions
try:
raw_process_info.status()
except psutil.NoSuchProcess:
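# Sketch of the guard described in the comment above: only treat a pid as
# alive when psutil can actually inspect it, which screens out reserved/hack
# pids and permission failures. Function name and handling are illustrative.
import psutil

def process_really_running(pid):
    try:
        psutil.Process(pid).status()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        return False
    return True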
@ -526,11 +525,14 @@ class ProcessManager:
target=tgt, args=args, kwargs=kwargs, name=name or tgt.__qualname__
)
process.register_finalize_method(cleanup_finalize_process, args, kwargs)
if isinstance(process, SignalHandlingProcess):
with default_signals(signal.SIGINT, signal.SIGTERM):
process.start()
else:
process.start()
log.debug("Started '%s' with pid %s", process.name, process.pid)
self._process_map[process.pid] = {
"tgt": tgt,
@ -538,6 +540,7 @@ class ProcessManager:
"kwargs": kwargs,
"Process": process,
}
return process
def restart_process(self, pid):
@ -686,6 +689,7 @@ class ProcessManager:
pass
try:
p_map["Process"].terminate()
except OSError as exc:
if exc.errno not in (errno.ESRCH, errno.EACCES):
raise
@ -1070,6 +1074,21 @@ class SignalHandlingProcess(Process):
msg += "SIGTERM"
msg += ". Exiting"
log.debug(msg)
# Run any registered process finalization routines
for method, args, kwargs in self._finalize_methods:
try:
method(*args, **kwargs)
except Exception: # pylint: disable=broad-except
log.exception(
"Failed to run finalize callback on %s; method=%r; args=%r; and kwargs=%r",
self,
method,
args,
kwargs,
)
continue
if HAS_PSUTIL:
try:
process = psutil.Process(os.getpid())
@ -1085,6 +1104,7 @@ class SignalHandlingProcess(Process):
self.pid,
os.getpid(),
)
except psutil.NoSuchProcess:
log.warning(
"Unable to kill children of process %d, it does not exist."
@ -1156,3 +1176,57 @@ class SubprocessList:
self.processes.remove(proc)
self.count -= 1
log.debug("Subprocess %s cleaned up", proc.name)
def cleanup_finalize_process(*args, **kwargs):
"""
Generic function that runs any registered process cleanup routines.
While class Process has a register_finalize_method, when a process is looked up by pid
using psutil.Process, there is no method available to register a cleanup routine.
Hence, add_process registers this function as a finalize method so that cleanup
routines which cannot be added via register_finalize_method still run.
"""
# Run any registered process cleanup routines
for method, args, kwargs in _INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST:
log.debug(
"cleanup_finalize_process, method=%r, args=%r, kwargs=%r",
method,
args,
kwargs,
)
try:
method(*args, **kwargs)
except Exception: # pylint: disable=broad-except
log.exception(
"Failed to run registered function finalize callback; method=%r; args=%r; and kwargs=%r",
method,
args,
kwargs,
)
continue
def register_cleanup_finalize_function(function, *args, **kwargs):
"""
Register a function to run as the process terminates.
While class Process has a register_finalize_method, when a process is looked up by pid
using psutil.Process, there is no method available to register a cleanup routine.
Hence, this function can be used to register cleanup functions
which cannot be added by class Process register_finalize_method.
Note: there is no deletion, since it is assumed that anything registered will continue to be used
"""
log.debug(
"register_cleanup_finalize_function entry, function=%r, args=%r, kwargs=%r",
function,
args,
kwargs,
)
finalize_function_tuple = (function, args, kwargs)
if finalize_function_tuple not in _INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST:
_INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST.append(finalize_function_tuple)
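# A hedged usage sketch mirroring the gitfs hunk earlier in this diff: register
# a module-level cleanup once, and cleanup_finalize_process() will invoke it
# when the process finalizes. The cache path and callback are illustrative.
import glob
import os

import salt.utils.process

def remove_stale_locks(cache_dir):
    # Best-effort removal of leftover gitfs lock files under the cache dir.
    pattern = os.path.join(cache_dir, "gitfs", "work", "**", "*.lk")
    for lock in glob.glob(pattern, recursive=True):
        try:
            os.remove(lock)
        except OSError:
            pass

salt.utils.process.register_cleanup_finalize_function(
    remove_stale_locks, "/var/cache/salt/master"
)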
@ -461,8 +461,6 @@ class Terminal:
else:
os.close(tty_fd)
salt.utils.crypt.reinit_crypto()
if preexec_fn is not None:
preexec_fn()
@ -11,15 +11,21 @@ from salt.exceptions import CommandExecutionError
try:
import psutil
import pywintypes
import win32api
import win32net
import win32security
from win32con import HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE
import pywintypes # isort:skip
HAS_WIN32 = True
except ImportError:
HAS_WIN32 = False
try:
import psutil
from win32 import pywintypes, win32api, win32net, win32security
from win32con import HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE
except ImportError:
HAS_WIN32 = False
# Although utils are often directly imported, it is also possible to use the
@ -706,6 +706,7 @@ def dependency_information(include_salt_cloud=False):
("msgpack-pure", "msgpack_pure", "version"),
("pycrypto", "Crypto", "__version__"),
("pycryptodome", "Cryptodome", "version_info"),
("cryptography", "cryptography", "__version__"),
("PyYAML", "yaml", "__version__"),
("PyZMQ", "zmq", "__version__"),
("ZMQ", "zmq", "zmq_version"),
@ -991,6 +991,9 @@ def salt_syndic_master_factory(
config_overrides = {
"log_level_logfile": "quiet",
"fips_mode": FIPS_TESTRUN,
"publish_signing_algorithm": (
"PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
),
}
ext_pillar = []
if salt.utils.platform.is_windows():
@ -1107,6 +1110,9 @@ def salt_master_factory(
config_overrides = {
"log_level_logfile": "quiet",
"fips_mode": FIPS_TESTRUN,
"publish_signing_algorithm": (
"PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
),
}
ext_pillar = []
if salt.utils.platform.is_windows():
@ -1216,6 +1222,8 @@ def salt_minion_factory(salt_master_factory):
"file_roots": salt_master_factory.config["file_roots"].copy(),
"pillar_roots": salt_master_factory.config["pillar_roots"].copy(),
"fips_mode": FIPS_TESTRUN,
"encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1",
"signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1",
}
virtualenv_binary = get_virtualenv_binary_path()
@ -1248,6 +1256,8 @@ def salt_sub_minion_factory(salt_master_factory):
"file_roots": salt_master_factory.config["file_roots"].copy(),
"pillar_roots": salt_master_factory.config["pillar_roots"].copy(),
"fips_mode": FIPS_TESTRUN,
"encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1",
"signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1",
}
virtualenv_binary = get_virtualenv_binary_path()
@ -16,6 +16,9 @@ from tests.support.case import ModuleCase
from tests.support.runtests import RUNTIME_VARS
@pytest.mark.skip_on_photonos(
reason="Consistant failures on photon, test needs refactoring"
)
@pytest.mark.windows_whitelisted
class LoaderGrainsTest(ModuleCase):
"""
Some files were not shown because too many files have changed in this diff.