Merge branch 'master' into remove_payload_Serial
Commit: 374f91f1ca
95 changed files with 1810 additions and 1522 deletions
.github/workflows/ci.yml (vendored): 98 changed lines
@@ -470,7 +470,7 @@ jobs:
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-salt-onedir:
     name: Build Salt Onedir
@@ -486,7 +486,7 @@ jobs:
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-rpm-pkgs:
     name: Build RPM Packages
@@ -498,7 +498,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-deb-pkgs:
     name: Build DEB Packages
@@ -510,7 +510,7 @@ jobs:
     with:
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-windows-pkgs:
     name: Build Windows Packages
@@ -522,7 +522,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-macos-pkgs:
     name: Build macOS Packages
@@ -534,7 +534,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   amazonlinux-2-pkg-tests:
     name: Amazon Linux 2 Package Tests
@@ -549,7 +549,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -566,7 +566,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -583,7 +583,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -600,7 +600,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -617,7 +617,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -634,7 +634,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -651,7 +651,7 @@ jobs:
       arch: aarch64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -668,7 +668,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -685,7 +685,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -702,7 +702,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -719,7 +719,7 @@ jobs:
       arch: aarch64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -736,7 +736,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -753,7 +753,7 @@ jobs:
       arch: aarch64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -770,7 +770,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: macos
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -787,7 +787,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -804,7 +804,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -821,7 +821,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -838,7 +838,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -855,7 +855,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -872,7 +872,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
 
@@ -890,7 +890,7 @@ jobs:
       arch: amd64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -909,7 +909,7 @@ jobs:
       arch: amd64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -928,7 +928,7 @@ jobs:
       arch: amd64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -947,7 +947,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -966,7 +966,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -985,7 +985,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1004,7 +1004,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1023,7 +1023,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1042,7 +1042,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1061,7 +1061,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1080,7 +1080,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1099,7 +1099,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1118,7 +1118,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1137,7 +1137,7 @@ jobs:
       arch: aarch64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1156,7 +1156,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1175,7 +1175,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1194,7 +1194,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1213,7 +1213,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1232,7 +1232,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1251,7 +1251,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1270,7 +1270,7 @@ jobs:
       arch: aarch64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1289,7 +1289,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
@@ -1308,7 +1308,7 @@ jobs:
       arch: aarch64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: ${{ github.event_name == 'pull_request' }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
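Every ci.yml hunk above is the same one-line bump: each job calls a reusable build workflow that pins the onedir interpreter, and this merge moves the pin from Python 3.10.11 to 3.11.3 at every call site (the cache-prefix hunks carry the same version string, so the build caches roll over with it). As a minimal sketch of the callee side of this pattern, assuming a hypothetical reusable-workflow file name and input defaults that this diff does not show:

# .github/workflows/build-deps-onedir.yml -- hypothetical callee sketch
name: Build Packaging Dependencies Onedir
on:
  workflow_call:
    inputs:
      relenv-version:
        type: string
        required: true     # pinned by the caller, e.g. "0.12.3"
      python-version:
        type: string
        required: true     # pinned by the caller, e.g. "3.11.3"

jobs:
  build-deps:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # The caller's python-version input flows straight into toolchain setup.
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ inputs.python-version }}

Declaring the version as a required workflow_call input keeps every caller explicit, which is why a version bump like this one surfaces as dozens of identical one-line hunks rather than a single change.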
.github/workflows/nightly.yml (vendored): 150 changed lines
@@ -523,7 +523,7 @@ jobs:
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-salt-onedir:
     name: Build Salt Onedir
@@ -539,7 +539,7 @@ jobs:
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-rpm-pkgs:
     name: Build RPM Packages
@@ -551,7 +551,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-deb-pkgs:
     name: Build DEB Packages
@@ -563,7 +563,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
 
   build-windows-pkgs:
     name: Build Windows Packages
@@ -575,7 +575,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
       environment: nightly
       sign-packages: false
     secrets: inherit
@@ -590,7 +590,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       relenv-version: "0.12.3"
-      python-version: "3.10.11"
+      python-version: "3.11.3"
       environment: nightly
      sign-packages: true
     secrets: inherit
@@ -608,7 +608,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -625,7 +625,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -642,7 +642,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -659,7 +659,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -676,7 +676,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -693,7 +693,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -710,7 +710,7 @@ jobs:
       arch: aarch64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -727,7 +727,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -744,7 +744,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: rpm
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -761,7 +761,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -778,7 +778,7 @@ jobs:
       arch: aarch64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -795,7 +795,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -812,7 +812,7 @@ jobs:
       arch: aarch64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: deb
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -829,7 +829,7 @@ jobs:
       arch: x86_64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: macos
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -846,7 +846,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -863,7 +863,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -880,7 +880,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -897,7 +897,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -914,7 +914,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: NSIS
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -931,7 +931,7 @@ jobs:
       arch: amd64
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       pkg-type: MSI
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       skip-code-coverage: false
       skip-junit-reports: false
 
@@ -949,7 +949,7 @@ jobs:
       arch: amd64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -968,7 +968,7 @@ jobs:
       arch: amd64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -987,7 +987,7 @@ jobs:
       arch: amd64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1006,7 +1006,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1025,7 +1025,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1044,7 +1044,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1063,7 +1063,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1082,7 +1082,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1101,7 +1101,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1120,7 +1120,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1139,7 +1139,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1158,7 +1158,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1177,7 +1177,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1196,7 +1196,7 @@ jobs:
       arch: aarch64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1215,7 +1215,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1234,7 +1234,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1253,7 +1253,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1272,7 +1272,7 @@ jobs:
       arch: x86_64
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1291,7 +1291,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1310,7 +1310,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1329,7 +1329,7 @@ jobs:
       arch: aarch64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1348,7 +1348,7 @@ jobs:
       arch: x86_64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -1367,7 +1367,7 @@ jobs:
       arch: aarch64
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
-      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
       pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: false
       skip-junit-reports: false
@@ -2119,18 +2119,56 @@ jobs:
       - name: Notify Slack
         id: slack
         if: always()
         uses: slackapi/slack-github-action@v1.24.0
         with:
           payload: |
             {
-              "text": "Nightly Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
-              "blocks": [
-                {
-                  "type": "section",
-                  "text": {
-                    "type": "mrkdwn",
-                    "text": "Nightly Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }})\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-                  }
-                }
-              ]
+              "attachments": [
+                {
+                  "fallback": "${{ github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
+                  "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}",
+                  "fields": [
+                    {
+                      "title": "Workflow",
+                      "short": true,
+                      "value": "${{ github.workflow }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Workflow Run",
+                      "short": true,
+                      "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Branch",
+                      "short": true,
+                      "value": "${{ github.ref_name }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Commit",
+                      "short": true,
+                      "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Attempt",
+                      "short": true,
+                      "value": "${{ github.run_attempt }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Status",
+                      "short": true,
+                      "value": "${{ steps.get-workflow-info.outputs.conclusion }}",
+                      "type": "mrkdwn"
+                    }
+                  ],
+                  "author_name": "${{ github.event.sender.login }}",
+                  "author_link": "${{ github.event.sender.html_url }}",
+                  "author_icon": "${{ github.event.sender.avatar_url }}"
+                }
+              ]
             }
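Beyond the version bumps, the nightly workflow's Slack step is rewritten from a Block Kit "blocks" payload to a legacy "attachments" payload, which is what lets it carry a status color bar and the Workflow/Run/Branch/Commit/Attempt/Status field grid. A condensed sketch of the resulting step, assuming the webhook is wired through a SLACK_WEBHOOK_URL secret (the diff does not show the env block, so the secret name and webhook type here are assumptions):

      - name: Notify Slack
        id: slack
        if: always()
        uses: slackapi/slack-github-action@v1.24.0
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}  # assumed secret name
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
        with:
          payload: |
            {
              "attachments": [
                {
                  "fallback": "${{ github.workflow }}: ${{ steps.get-workflow-info.outputs.conclusion }}",
                  "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}",
                  "fields": [
                    {"title": "Workflow", "short": true, "value": "${{ github.workflow }}", "type": "mrkdwn"},
                    {"title": "Status", "short": true, "value": "${{ steps.get-workflow-info.outputs.conclusion }}", "type": "mrkdwn"}
                  ]
                }
              ]
            }

The "color" value is a ternary built from the expression operators && and ||: red (ff3d00) on anything but success, green (00e676) otherwise.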
.github/workflows/release.yml (vendored): 64 changed lines
|
@ -232,7 +232,7 @@ jobs:
|
|||
distro-slug: almalinux-8
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -252,7 +252,7 @@ jobs:
|
|||
distro-slug: almalinux-8-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -272,7 +272,7 @@ jobs:
|
|||
distro-slug: almalinux-9
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -292,7 +292,7 @@ jobs:
|
|||
distro-slug: almalinux-9-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -312,7 +312,7 @@ jobs:
|
|||
distro-slug: amazonlinux-2
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -332,7 +332,7 @@ jobs:
|
|||
distro-slug: amazonlinux-2-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -352,7 +352,7 @@ jobs:
|
|||
distro-slug: centos-7
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -372,7 +372,7 @@ jobs:
|
|||
distro-slug: centos-7-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -392,7 +392,7 @@ jobs:
|
|||
distro-slug: centosstream-8
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -412,7 +412,7 @@ jobs:
|
|||
distro-slug: centosstream-8-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -432,7 +432,7 @@ jobs:
|
|||
distro-slug: centosstream-9
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -452,7 +452,7 @@ jobs:
|
|||
distro-slug: centosstream-9-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -472,7 +472,7 @@ jobs:
|
|||
distro-slug: debian-10
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -492,7 +492,7 @@ jobs:
|
|||
distro-slug: debian-11
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -512,7 +512,7 @@ jobs:
|
|||
distro-slug: debian-11-arm64
|
||||
platform: linux
|
||||
arch: aarch64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
environment: release
|
||||
skip-code-coverage: true
|
||||
|
@ -532,7 +532,7 @@ jobs:
|
|||
distro-slug: fedora-37
|
||||
platform: linux
|
||||
arch: x86_64
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -552,7 +552,7 @@ jobs:
distro-slug: fedora-37-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -572,7 +572,7 @@ jobs:
distro-slug: fedora-38
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -592,7 +592,7 @@ jobs:
distro-slug: fedora-38-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -612,7 +612,7 @@ jobs:
distro-slug: photonos-3
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -632,7 +632,7 @@ jobs:
distro-slug: photonos-4
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -652,7 +652,7 @@ jobs:
distro-slug: ubuntu-20.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -672,7 +672,7 @@ jobs:
distro-slug: ubuntu-20.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -692,7 +692,7 @@ jobs:
distro-slug: ubuntu-22.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -712,7 +712,7 @@ jobs:
distro-slug: ubuntu-22.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -732,7 +732,7 @@ jobs:
distro-slug: ubuntu-22.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -752,7 +752,7 @@ jobs:
distro-slug: ubuntu-22.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -772,7 +772,7 @@ jobs:
distro-slug: macos-12
platform: darwin
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -792,7 +792,7 @@ jobs:
distro-slug: macos-12
platform: darwin
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -813,7 +813,7 @@ jobs:
platform: windows
arch: amd64
pkg-type: nsis
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -833,7 +833,7 @@ jobs:
platform: windows
arch: amd64
pkg-type: msi
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true

@@ -853,7 +853,7 @@ jobs:
platform: windows
arch: amd64
pkg-type: onedir
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
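Note: every release-workflow hunk above makes the same one-line change: the cache-prefix suffix tracks the bundled onedir Python, moving from 3.10.11 to 3.11.3 in step with the python-version bump. A minimal sketch of why the suffix matters (illustrative only; it assumes the called workflows key a cache on this input via actions/cache, which is not shown in this diff):

    steps:
      - uses: actions/cache@v3
        with:
          path: artifacts/
          # Any change to `key` is a cache miss, so bumping the embedded
          # Python version discards artifacts built for the old interpreter
          # instead of silently reusing them.
          key: ${{ inputs.cache-prefix }}|test-deps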
98 .github/workflows/scheduled.yml vendored
@@ -513,7 +513,7 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-salt-onedir:
name: Build Salt Onedir

@@ -529,7 +529,7 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-rpm-pkgs:
name: Build RPM Packages

@@ -541,7 +541,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-deb-pkgs:
name: Build DEB Packages

@@ -553,7 +553,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-windows-pkgs:
name: Build Windows Packages

@@ -565,7 +565,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-macos-pkgs:
name: Build macOS Packages

@@ -577,7 +577,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

amazonlinux-2-pkg-tests:
name: Amazon Linux 2 Package Tests

@@ -592,7 +592,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -609,7 +609,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -626,7 +626,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -643,7 +643,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -660,7 +660,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -677,7 +677,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -694,7 +694,7 @@ jobs:
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -711,7 +711,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -728,7 +728,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -745,7 +745,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -762,7 +762,7 @@ jobs:
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -779,7 +779,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -796,7 +796,7 @@ jobs:
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -813,7 +813,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: macos
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -830,7 +830,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -847,7 +847,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -864,7 +864,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -881,7 +881,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -898,7 +898,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -915,7 +915,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: false
skip-junit-reports: false

@@ -933,7 +933,7 @@ jobs:
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -952,7 +952,7 @@ jobs:
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -971,7 +971,7 @@ jobs:
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -990,7 +990,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1009,7 +1009,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1028,7 +1028,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1047,7 +1047,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1066,7 +1066,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1085,7 +1085,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1104,7 +1104,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1123,7 +1123,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1142,7 +1142,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1161,7 +1161,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1180,7 +1180,7 @@ jobs:
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1199,7 +1199,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1218,7 +1218,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1237,7 +1237,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1256,7 +1256,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1275,7 +1275,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1294,7 +1294,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1313,7 +1313,7 @@ jobs:
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1332,7 +1332,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

@@ -1351,7 +1351,7 @@ jobs:
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false
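Note: the scheduled workflow receives the same relenv/Python/cache-prefix bumps but keeps skip-code-coverage and skip-junit-reports set to false, unlike the release jobs above, which skip both. The pinned strings plausibly feed reusable-workflow inputs shaped like this (an assumed interface for illustration; the called workflow files are not part of this diff):

    on:
      workflow_call:
        inputs:
          relenv-version:
            type: string
            default: "0.12.3"
          python-version:
            type: string
            default: "3.11.3"   # onedir interpreter, bumped by this commit
          cache-prefix:
            type: string
            required: true      # seed plus Python version, e.g. "<seed>|3.11.3"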
162 .github/workflows/staging.yml vendored
@@ -513,7 +513,7 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-salt-onedir:
name: Build Salt Onedir

@@ -529,7 +529,7 @@ jobs:
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-rpm-pkgs:
name: Build RPM Packages

@@ -541,7 +541,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-deb-pkgs:
name: Build DEB Packages

@@ -553,7 +553,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"

build-windows-pkgs:
name: Build Windows Packages

@@ -565,7 +565,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"
environment: staging
sign-packages: ${{ inputs.sign-windows-packages }}
secrets: inherit

@@ -580,7 +580,7 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.12.3"
python-version: "3.10.11"
python-version: "3.11.3"
environment: staging
sign-packages: true
secrets: inherit

@@ -598,7 +598,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -615,7 +615,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -632,7 +632,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -649,7 +649,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -666,7 +666,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -683,7 +683,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -700,7 +700,7 @@ jobs:
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -717,7 +717,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -734,7 +734,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: rpm
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -751,7 +751,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -768,7 +768,7 @@ jobs:
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -785,7 +785,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -802,7 +802,7 @@ jobs:
arch: aarch64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -819,7 +819,7 @@ jobs:
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: macos
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -836,7 +836,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -853,7 +853,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -870,7 +870,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -887,7 +887,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -904,7 +904,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: NSIS
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -921,7 +921,7 @@ jobs:
arch: amd64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: MSI
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
skip-code-coverage: true
skip-junit-reports: true

@@ -939,7 +939,7 @@ jobs:
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -958,7 +958,7 @@ jobs:
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -977,7 +977,7 @@ jobs:
arch: amd64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -996,7 +996,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1015,7 +1015,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1034,7 +1034,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1053,7 +1053,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1072,7 +1072,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1091,7 +1091,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1110,7 +1110,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1129,7 +1129,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1148,7 +1148,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1167,7 +1167,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1186,7 +1186,7 @@ jobs:
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1205,7 +1205,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1224,7 +1224,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1243,7 +1243,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1262,7 +1262,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1281,7 +1281,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1300,7 +1300,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1319,7 +1319,7 @@ jobs:
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1338,7 +1338,7 @@ jobs:
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -1357,7 +1357,7 @@ jobs:
arch: aarch64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

@@ -2120,7 +2120,7 @@ jobs:
distro-slug: almalinux-8
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2139,7 +2139,7 @@ jobs:
distro-slug: almalinux-8-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2158,7 +2158,7 @@ jobs:
distro-slug: almalinux-9
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2177,7 +2177,7 @@ jobs:
distro-slug: almalinux-9-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2196,7 +2196,7 @@ jobs:
distro-slug: amazonlinux-2
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2215,7 +2215,7 @@ jobs:
distro-slug: amazonlinux-2-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2234,7 +2234,7 @@ jobs:
distro-slug: centos-7
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2253,7 +2253,7 @@ jobs:
distro-slug: centos-7-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2272,7 +2272,7 @@ jobs:
distro-slug: centosstream-8
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2291,7 +2291,7 @@ jobs:
distro-slug: centosstream-8-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2310,7 +2310,7 @@ jobs:
distro-slug: centosstream-9
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2329,7 +2329,7 @@ jobs:
distro-slug: centosstream-9-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2348,7 +2348,7 @@ jobs:
distro-slug: debian-10
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2367,7 +2367,7 @@ jobs:
distro-slug: debian-11
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2386,7 +2386,7 @@ jobs:
distro-slug: debian-11-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2405,7 +2405,7 @@ jobs:
distro-slug: fedora-37
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2424,7 +2424,7 @@ jobs:
distro-slug: fedora-37-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2443,7 +2443,7 @@ jobs:
distro-slug: fedora-38
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2462,7 +2462,7 @@ jobs:
distro-slug: fedora-38-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2481,7 +2481,7 @@ jobs:
distro-slug: photonos-3
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2500,7 +2500,7 @@ jobs:
distro-slug: photonos-4
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2519,7 +2519,7 @@ jobs:
distro-slug: ubuntu-20.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2538,7 +2538,7 @@ jobs:
distro-slug: ubuntu-20.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2557,7 +2557,7 @@ jobs:
distro-slug: ubuntu-22.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2576,7 +2576,7 @@ jobs:
distro-slug: ubuntu-22.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2595,7 +2595,7 @@ jobs:
distro-slug: ubuntu-22.04
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2614,7 +2614,7 @@ jobs:
distro-slug: ubuntu-22.04-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2633,7 +2633,7 @@ jobs:
distro-slug: macos-12
platform: darwin
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2652,7 +2652,7 @@ jobs:
distro-slug: macos-12
platform: darwin
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2672,7 +2672,7 @@ jobs:
platform: windows
arch: amd64
pkg-type: nsis
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2691,7 +2691,7 @@ jobs:
platform: windows
arch: amd64
pkg-type: msi
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true

@@ -2710,7 +2710,7 @@ jobs:
platform: windows
arch: amd64
pkg-type: onedir
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
130 .github/workflows/templates/nightly.yml.jinja (vendored)
@@ -52,6 +52,81 @@ concurrency:

 <%- include "workflow-requirements-check.yml.jinja" %>
+<%- include "trigger-branch-workflows.yml.jinja" %>
+
+{#- When we start using a slack app, we can update messages, not while using incoming webhooks
+<%- if workflow_slug == "nightly" %>
+
+  <%- do conclusion_needs.append('notify-slack') %>
+  notify-slack:
+    name: Notify Slack
+    runs-on: ubuntu-latest
+    environment: <{ gh_environment }>
+    needs:
+      <%- for need in prepare_workflow_needs.iter(consume=False) %>
+      - <{ need }>
+      <%- endfor %>
+    outputs:
+      update-ts: ${{ steps.slack.outputs.update-ts }}
+    steps:
+      - name: Notify Slack
+        id: slack
+        uses: slackapi/slack-github-action@v1.24.0
+        with:
+          payload: |
+            {
+              "attachments": [
+                {
+                  "color": "ffca28",
+                  "fields": [
+                    {
+                      "title": "Workflow",
+                      "short": true,
+                      "value": "${{ github.workflow }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Workflow Run",
+                      "short": true,
+                      "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Branch",
+                      "short": true,
+                      "value": "${{ github.ref_name }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Commit",
+                      "short": true,
+                      "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Attempt",
+                      "short": true,
+                      "value": "${{ github.run_attempt }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Status",
+                      "short": true,
+                      "value": "running",
+                      "type": "mrkdwn"
+                    }
+                  ],
+                  "author_name": "${{ github.event.sender.login }}",
+                  "author_link": "${{ github.event.sender.html_url }}",
+                  "author_icon": "${{ github.event.sender.avatar_url }}"
+                }
+              ]
+            }
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
+
+<%- endif %>
+#}

 <%- endblock pre_jobs %>

 <%- block jobs %>
@@ -123,18 +198,59 @@ concurrency:

       - name: Notify Slack
         id: slack
         if: always()
         uses: slackapi/slack-github-action@v1.24.0
         with:
+          {#- When we start using a slack app, we can update messages, not while using incoming webhooks
+          update-ts: ${{ needs.notify-slack.outputs.update-ts }}
+          #}
           payload: |
             {
               "text": "Nightly Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
-              "blocks": [
+              "attachments": [
                 {
-                  "type": "section",
-                  "text": {
-                    "type": "mrkdwn",
-                    "text": "Nightly Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }})\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-                  }
+                  "fallback": "${{ github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
+                  "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}",
+                  "fields": [
+                    {
+                      "title": "Workflow",
+                      "short": true,
+                      "value": "${{ github.workflow }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Workflow Run",
+                      "short": true,
+                      "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Branch",
+                      "short": true,
+                      "value": "${{ github.ref_name }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Commit",
+                      "short": true,
+                      "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Attempt",
+                      "short": true,
+                      "value": "${{ github.run_attempt }}",
+                      "type": "mrkdwn"
+                    },
+                    {
+                      "title": "Status",
+                      "short": true,
+                      "value": "${{ steps.get-workflow-info.outputs.conclusion }}",
+                      "type": "mrkdwn"
+                    }
+                  ],
+                  "author_name": "${{ github.event.sender.login }}",
+                  "author_link": "${{ github.event.sender.html_url }}",
+                  "author_icon": "${{ github.event.sender.avatar_url }}"
                 }
               ]
             }
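The payload above targets a Slack incoming webhook. As a rough illustration, not part of the commit, the same legacy-attachment JSON can be posted from Python with only the standard library; the webhook URL is an assumption you would supply through an environment variable:

    import json
    import os
    import urllib.request

    # Hypothetical setup: SLACK_WEBHOOK_URL must hold a real incoming-webhook URL.
    webhook_url = os.environ["SLACK_WEBHOOK_URL"]
    payload = {
        "attachments": [
            {
                "color": "00e676",
                "fields": [
                    {"title": "Status", "short": True, "value": "success", "type": "mrkdwn"}
                ],
            }
        ]
    }
    req = urllib.request.Request(
        webhook_url,
        data=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        print(resp.status)  # Slack answers 200 with body "ok" on success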
@@ -1124,6 +1124,32 @@ repos:
         - --include=requirements/zeromq.txt
         - requirements/static/ci/pkgtests-windows.in

+      - id: pip-tools-compile
+        alias: compile-ci-pkg-3.11-requirements
+        name: PKG tests CI Py3.11 Requirements
+        files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.11/pkgtests\.in)))$
+        pass_filenames: false
+        args:
+          - -v
+          - --py-version=3.11
+          - --platform=linux
+          - --include=requirements/base.txt
+          - --include=requirements/zeromq.txt
+          - requirements/static/ci/pkgtests.in
+
+      - id: pip-tools-compile
+        alias: compile-ci-windows-pkg-3.11-requirements
+        name: PKG tests Windows CI Py3.11 Requirements
+        files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests-windows|common)\.in|py3\.11/pkgtests-windows\.in)))$
+        pass_filenames: false
+        args:
+          - -v
+          - --py-version=3.11
+          - --platform=windows
+          - --include=requirements/base.txt
+          - --include=requirements/zeromq.txt
+          - requirements/static/ci/pkgtests-windows.in
+
 # <---- PKG ci requirements-----------------------------------------------------------------------------------------
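Each of these hooks shells out to pip-compile; the exact command is recorded in the header of the generated file further below. A minimal sketch, assuming pip-tools is installed and the `.in` inputs exist, of what the Py3.11 Linux hook effectively runs:

    import subprocess

    # Mirrors the pip-compile invocation recorded in the autogenerated
    # requirements/static/ci/py3.11/pkgtests.txt header.
    subprocess.run(
        [
            "pip-compile",
            "--output-file=requirements/static/ci/py3.11/pkgtests.txt",
            "requirements/base.txt",
            "requirements/static/ci/pkgtests.in",
            "requirements/zeromq.txt",
        ],
        check=True,
    )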
@@ -1191,13 +1217,13 @@ repos:

   - repo: https://github.com/s0undt3ch/salt-rewrite
     # Automatically rewrite code with known rules
-    rev: 2.4.4
+    rev: 2.5.2
     hooks:
       - id: salt-rewrite
-        alias: rewrite-docstrings
-        name: Salt docstrings auto-fixes
+        alias: rewrite-salt
+        name: Salt code auto-fixes
         files: ^salt/.*\.py$
-        args: [--silent, -F, fix_docstrings]
+        args: [--silent, -F, fix_docstrings, -F, fix_warn_until]
         exclude: >
           (?x)^(
             salt/ext/.*
1 changelog/62520.removed.md (new file)
@@ -0,0 +1 @@
Removed RHEL 5 support, since it has long since reached end-of-life.

1 changelog/64457.added.md (new file)
@@ -0,0 +1 @@
Switched Salt's onedir Python version to 3.11.

7 changelog/64457.changed.md (new file)
@@ -0,0 +1,7 @@
Addressed Python 3.11 deprecations:

* Switched to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0
* Stopped using the deprecated `cgi` module
* Stopped using the deprecated `pipes` module
* Backported `locale.getdefaultlocale()` into Salt, since it is getting removed in Py 3.13
* Stopped using the deprecated `imp` module

1 changelog/64461.removed.md (new file)
@@ -0,0 +1 @@
Removed the 'transport' argument from salt.utils.event.get_event.

1 changelog/64477.fixed.md (new file)
@@ -0,0 +1 @@
Fixed file.symlink not replacing/updating an existing symlink.
@@ -1,4 +1,4 @@
-python_version: "3.10.11"
+python_version: "3.11.3"
 relenv_version: "0.12.3"
 release-branches:
   - "3006.x"
@@ -6,6 +6,9 @@
 Add release specific details below
 -->

+## Python 3.11
+Salt's onedir packages now use Python 3.11.
+
 ## Python 3.7 Support Dropped
 Support for Python 3.7 has been dropped since it reached end-of-life on 27 Jun 2023.
@@ -1,5 +1,6 @@
 case "$1" in
   configure)
-    chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy
+    PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush()")
+    chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy
     ;;
 esac
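The embedded one-liner derives "3.11" from the interpreter itself, so the postinst no longer hard-codes the Python version. A related sketch, not from the commit: asking the interpreter directly for its site-packages directory, which avoids hand-building the pythonX.Y path altogether.

    import sysconfig

    # "purelib" is this interpreter's site-packages directory, e.g.
    # /opt/saltstack/salt/lib/python3.11/site-packages for the onedir build.
    print(sysconfig.get_paths()["purelib"])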
@@ -191,16 +191,16 @@ fi
 #-------------------------------------------------------------------------------
 _msg "Installing relenv"
 if [ -n "${RELENV_VERSION}" ]; then
-    pip install relenv==${RELENV_VERSION} >/dev/null 2>&1
+    export RELENV_FETCH_VERSION=${RELENV_VERSION}
+    pip install relenv==${RELENV_VERSION}
 else
-    pip install relenv >/dev/null 2>&1
+    pip install relenv
 fi
 if [ -n "$(relenv --version)" ]; then
     _success
 else
     _failure
 fi
+export RELENV_FETCH_VERSION=$(relenv --version)

 #-------------------------------------------------------------------------------
 # Building Python with Relenv
@@ -212,7 +212,7 @@ else
 # We want to suppress the output here so it looks nice
 # To see the output, remove the output redirection
 _msg "Fetching python (relenv)"
-relenv fetch --python $PY_VERSION >/dev/null 2>&1
+relenv fetch --python=$PY_VERSION
 if [ -f "$RELENV_DIR/build/$PY_VERSION-x86_64-macos.tar.xz" ]; then
     _success
 else
@@ -44,7 +44,7 @@ def grains(sminion):
     return sminion.opts["grains"].copy()


-@pytest.fixture(scope="module", autouse=True)
+@pytest.fixture(scope="session", autouse=True)
 def _system_up_to_date(
     grains,
     shell,
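The scope bump means the system-update fixture now runs once per test session instead of once per module. A small illustrative sketch, with a hypothetical fixture name, of the pattern in a conftest.py:

    import pytest

    # "session"-scoped autouse fixtures execute once before the whole run and
    # tear down once after it, instead of repeating per module.
    @pytest.fixture(scope="session", autouse=True)
    def _example_session_setup():
        # one-time setup goes here
        yield
        # one-time teardown goes here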
@@ -3,13 +3,6 @@ import sys
 import pytest


-@pytest.fixture(scope="module")
-def grains(salt_call_cli):
-    ret = salt_call_cli.run("--local", "grains.items")
-    assert ret.data, ret
-    return ret.data
-
-
 @pytest.fixture(scope="module")
 def pkg_name(salt_call_cli, grains):
     if sys.platform.startswith("win"):
@@ -1,5 +1,6 @@
 import pathlib
+import subprocess
 import sys

 import psutil
 import pytest

@@ -61,7 +62,9 @@ def test_salt_cloud_dirs(install_salt):
     Test the correct user is running the Salt Master
     """
     paths = [
-        "/opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy",
+        "/opt/saltstack/salt/lib/python{}.{}/site-packages/salt/cloud/deploy".format(
+            *sys.version_info
+        ),
         "/etc/salt/cloud.deploy.d",
     ]
     for name in paths:
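`str.format` ignores surplus positional arguments, so unpacking the full `sys.version_info` tuple into a two-placeholder template yields just "major.minor". A quick check:

    import sys

    # sys.version_info is e.g. (3, 11, 3, "final", 0); "{}.{}" consumes the
    # first two fields and str.format silently ignores the rest.
    print("python{}.{}".format(*sys.version_info))  # -> e.g. "python3.11"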
@@ -1,8 +1,8 @@
 mock >= 3.0.0
 # PyTest
+docker
 pytest >= 7.2.0
-pytest-salt-factories >= 1.0.0rc21; sys_platform == 'win32'
-pytest-salt-factories[docker] >= 1.0.0rc21; sys_platform != 'win32'
+pytest-salt-factories >= 1.0.0rc21
 pytest-tempdir >= 2019.10.12
 pytest-helpers-namespace >= 2019.1.8
 pytest-subtests
@@ -14,7 +14,6 @@ cherrypy>=17.4.1
 clustershell
 croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32'
 dnspython
-docker
 etcd3-py==0.1.6
 gitpython>=3.1.30
 jmespath

@@ -29,7 +28,10 @@ libnacl>=1.7.1; sys_platform != 'win32' and sys_platform != 'darwin'
 moto>=2.0.0
 napalm; sys_platform != 'win32'
 paramiko>=2.10.1; sys_platform != 'win32' and sys_platform != 'darwin'
-passlib[bcrypt]>=1.7.4
+# bcrypt is an extra requirement for passlib, and we shouldn't use extras like passlib[bcrypt],
+# since that would break using the compiled static requirements files as constraints files
+bcrypt
+passlib>=1.7.4
 pyinotify>=0.9.6; sys_platform != 'win32' and sys_platform != 'darwin' and platform_system != "openbsd"
 python-etcd>0.4.2
 pyvmomi
@@ -34,8 +34,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -114,9 +114,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -292,7 +290,7 @@ paramiko==3.2.0
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -359,7 +357,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -31,8 +31,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -111,9 +111,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -288,7 +286,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -354,7 +352,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -42,8 +42,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -125,9 +125,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -314,7 +312,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -387,7 +385,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -24,7 +24,7 @@ autocommand==2.2.2
     #   -c requirements/static/ci/../pkg/py3.10/windows.txt
     #   jaraco.text
 bcrypt==4.0.1
-    # via passlib
+    # via -r requirements/static/ci/common.in
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -110,7 +110,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via -r requirements/static/ci/common.in
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -242,7 +242,7 @@ packaging==23.1
     #   -r requirements/base.txt
     #   docker
     #   pytest
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 patch==1.16
     # via -r requirements/static/ci/windows.in

@@ -311,7 +311,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -34,8 +34,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -114,9 +114,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 filelock==3.12.1

@@ -290,7 +288,7 @@ paramiko==3.2.0
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -357,7 +355,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -31,8 +31,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -111,9 +111,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 filelock==3.12.1

@@ -286,7 +284,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -352,7 +350,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -42,8 +42,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -125,9 +125,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 filelock==3.12.1

@@ -310,7 +308,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -383,7 +381,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories
164 requirements/static/ci/py3.11/pkgtests-windows.txt (new file)
@@ -0,0 +1,164 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
#    pip-compile --output-file=requirements/static/ci/py3.11/pkgtests-windows.txt requirements/base.txt requirements/static/ci/pkgtests-windows.in requirements/zeromq.txt
#
attrs==23.1.0
    # via
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-skip-markers
    #   pytest-system-statistics
autocommand==2.2.2
    # via jaraco.text
certifi==2023.5.7
    # via requests
cffi==1.15.1
    # via
    #   clr-loader
    #   cryptography
charset-normalizer==2.1.1
    # via requests
cheroot==10.0.0
    # via cherrypy
cherrypy==18.8.0
    # via -r requirements/static/ci/pkgtests-windows.in
clr-loader==0.2.5
    # via pythonnet
colorama==0.4.6
    # via pytest
contextvars==2.4
    # via -r requirements/base.txt
cryptography==40.0.2
    # via -r requirements/crypto.txt
distlib==0.3.6
    # via virtualenv
distro==1.8.0
    # via
    #   -r requirements/base.txt
    #   pytest-skip-markers
filelock==3.9.0
    # via virtualenv
idna==3.4
    # via requests
immutables==0.15
    # via contextvars
inflect==6.0.2
    # via jaraco.text
iniconfig==2.0.0
    # via pytest
jaraco.collections==4.1.0
    # via cherrypy
jaraco.context==4.3.0
    # via jaraco.text
jaraco.functools==3.7.0
    # via
    #   cheroot
    #   jaraco.text
    #   tempora
jaraco.text==3.11.1
    # via jaraco.collections
jinja2==3.1.2
    # via -r requirements/base.txt
jmespath==1.0.1
    # via -r requirements/base.txt
looseversion==1.2.0
    # via -r requirements/base.txt
markupsafe==2.1.2
    # via
    #   -r requirements/base.txt
    #   jinja2
more-itertools==9.1.0
    # via
    #   cheroot
    #   cherrypy
    #   jaraco.functools
    #   jaraco.text
msgpack==1.0.5
    # via
    #   -r requirements/base.txt
    #   pytest-salt-factories
packaging==23.1
    # via
    #   -r requirements/base.txt
    #   pytest
platformdirs==2.6.2
    # via virtualenv
pluggy==1.0.0
    # via pytest
portend==3.1.0
    # via cherrypy
psutil==5.9.5
    # via
    #   -r requirements/base.txt
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-system-statistics
pycparser==2.21
    # via cffi
pycryptodomex==3.9.8
    # via -r requirements/crypto.txt
pydantic==1.10.4
    # via inflect
pytest-helpers-namespace==2021.12.29
    # via
    #   pytest-salt-factories
    #   pytest-shell-utilities
pytest-salt-factories==1.0.0rc17
    # via -r requirements/static/ci/pkgtests-windows.in
pytest-shell-utilities==1.7.0
    # via pytest-salt-factories
pytest-skip-markers==1.4.0
    # via
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-system-statistics
pytest-system-statistics==1.0.2
    # via pytest-salt-factories
pytest-tempdir==2019.10.12
    # via pytest-salt-factories
pytest==7.3.1
    # via
    #   pytest-helpers-namespace
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-skip-markers
    #   pytest-system-statistics
    #   pytest-tempdir
pythonnet==3.0.1
    # via -r requirements/static/ci/pkgtests-windows.in
pytz==2023.3
    # via tempora
pywin32==306
    # via
    #   pytest-skip-markers
    #   wmi
pyyaml==6.0
    # via -r requirements/base.txt
pyzmq==25.1.0
    # via
    #   -r requirements/zeromq.txt
    #   pytest-salt-factories
requests==2.31.0
    # via -r requirements/base.txt
tempora==5.2.2
    # via portend
tornado==6.3.2 ; python_version >= "3.8"
    # via -r requirements/base.txt
typing-extensions==4.6.2
    # via
    #   pydantic
    #   pytest-shell-utilities
    #   pytest-system-statistics
urllib3==1.26.14
    # via requests
virtualenv==20.18.0
    # via pytest-salt-factories
wmi==1.5.1 ; sys_platform == "win32"
    # via -r requirements/static/ci/pkgtests-windows.in
zc.lockfile==2.0
    # via cherrypy

# The following packages are considered to be unsafe in a requirements file:
# setuptools
159 requirements/static/ci/py3.11/pkgtests.txt (new file)
@@ -0,0 +1,159 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
#    pip-compile --output-file=requirements/static/ci/py3.11/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt
#
attrs==23.1.0
    # via
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-skip-markers
    #   pytest-system-statistics
autocommand==2.2.2
    # via jaraco.text
certifi==2023.5.7
    # via requests
cffi==1.15.1
    # via cryptography
charset-normalizer==2.1.1
    # via requests
cheroot==10.0.0
    # via cherrypy
cherrypy==18.8.0
    # via -r requirements/static/ci/pkgtests.in
contextvars==2.4
    # via -r requirements/base.txt
cryptography==40.0.2
    # via -r requirements/crypto.txt
distlib==0.3.6
    # via virtualenv
distro==1.8.0
    # via
    #   -r requirements/base.txt
    #   pytest-skip-markers
docker==6.1.2
    # via -r requirements/static/ci/pkgtests.in
filelock==3.9.0
    # via virtualenv
idna==3.4
    # via requests
immutables==0.15
    # via contextvars
inflect==6.0.2
    # via jaraco.text
iniconfig==2.0.0
    # via pytest
jaraco.collections==4.1.0
    # via cherrypy
jaraco.context==4.3.0
    # via jaraco.text
jaraco.functools==3.7.0
    # via
    #   cheroot
    #   jaraco.text
    #   tempora
jaraco.text==3.11.1
    # via jaraco.collections
jinja2==3.1.2
    # via -r requirements/base.txt
jmespath==1.0.1
    # via -r requirements/base.txt
looseversion==1.2.0
    # via -r requirements/base.txt
markupsafe==2.1.2
    # via
    #   -r requirements/base.txt
    #   jinja2
more-itertools==9.1.0
    # via
    #   cheroot
    #   cherrypy
    #   jaraco.functools
    #   jaraco.text
msgpack==1.0.5
    # via
    #   -r requirements/base.txt
    #   pytest-salt-factories
packaging==23.1
    # via
    #   -r requirements/base.txt
    #   docker
    #   pytest
platformdirs==2.6.2
    # via virtualenv
pluggy==1.0.0
    # via pytest
portend==3.1.0
    # via cherrypy
psutil==5.9.5
    # via
    #   -r requirements/base.txt
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-system-statistics
pycparser==2.21
    # via cffi
pycryptodomex==3.9.8
    # via -r requirements/crypto.txt
pydantic==1.10.4
    # via inflect
pytest-helpers-namespace==2021.12.29
    # via
    #   pytest-salt-factories
    #   pytest-shell-utilities
pytest-salt-factories==1.0.0rc17
    # via -r requirements/static/ci/pkgtests.in
pytest-shell-utilities==1.7.0
    # via pytest-salt-factories
pytest-skip-markers==1.4.0
    # via
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-system-statistics
pytest-system-statistics==1.0.2
    # via pytest-salt-factories
pytest-tempdir==2019.10.12
    # via pytest-salt-factories
pytest==7.3.1
    # via
    #   pytest-helpers-namespace
    #   pytest-salt-factories
    #   pytest-shell-utilities
    #   pytest-skip-markers
    #   pytest-system-statistics
    #   pytest-tempdir
pytz==2023.3
    # via tempora
pyyaml==6.0
    # via -r requirements/base.txt
pyzmq==25.1.0
    # via
    #   -r requirements/zeromq.txt
    #   pytest-salt-factories
requests==2.31.0
    # via
    #   -r requirements/base.txt
    #   docker
tempora==5.2.2
    # via portend
tornado==6.3.2 ; python_version >= "3.8"
    # via -r requirements/base.txt
typing-extensions==4.6.2
    # via
    #   pydantic
    #   pytest-shell-utilities
    #   pytest-system-statistics
urllib3==1.26.14
    # via
    #   docker
    #   requests
virtualenv==20.17.1
    # via pytest-salt-factories
websocket-client==1.5.1
    # via docker
zc.lockfile==2.0
    # via cherrypy

# The following packages are considered to be unsafe in a requirements file:
# setuptools
@@ -24,7 +24,7 @@ autocommand==2.2.2
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   jaraco.text
 bcrypt==4.0.1
-    # via passlib
+    # via -r requirements/static/ci/common.in
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -110,7 +110,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via -r requirements/static/ci/common.in
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 filelock==3.12.1

@@ -240,7 +240,7 @@ packaging==23.1
     #   -r requirements/base.txt
     #   docker
     #   pytest
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 patch==1.16
     # via -r requirements/static/ci/windows.in

@@ -309,7 +309,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories
@@ -31,8 +31,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -111,9 +111,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -292,7 +290,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -357,7 +355,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -42,8 +42,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -125,9 +125,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -318,7 +316,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -391,7 +389,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -24,7 +24,7 @@ autocommand==2.2.2
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   jaraco.text
 bcrypt==4.0.1
-    # via passlib
+    # via -r requirements/static/ci/common.in
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -110,7 +110,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via -r requirements/static/ci/common.in
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -246,7 +246,7 @@ packaging==23.1
     #   -r requirements/base.txt
     #   docker
     #   pytest
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 patch==1.16
     # via -r requirements/static/ci/windows.in

@@ -315,7 +315,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -34,8 +34,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -114,9 +114,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -292,7 +290,7 @@ paramiko==3.2.0
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -359,7 +357,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -31,8 +31,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -111,9 +111,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -288,7 +286,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -354,7 +352,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -42,8 +42,8 @@ autocommand==2.2.2
     #   jaraco.text
 bcrypt==4.0.1
     # via
+    #   -r requirements/static/ci/common.in
     #   paramiko
-    #   passlib
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -125,9 +125,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via
-    #   -r requirements/static/ci/common.in
-    #   pytest-salt-factories
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -316,7 +314,7 @@ paramiko==3.2.0 ; sys_platform != "win32" and sys_platform != "darwin"
     #   ncclient
     #   netmiko
     #   scp
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 pathspec==0.11.1
     # via yamllint

@@ -389,7 +387,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories

@@ -24,7 +24,7 @@ autocommand==2.2.2
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   jaraco.text
 bcrypt==4.0.1
-    # via passlib
+    # via -r requirements/static/ci/common.in
 boto3==1.26.152
     # via
     #   -r requirements/static/ci/common.in

@@ -110,7 +110,7 @@ dnspython==2.3.0
     #   -r requirements/static/ci/common.in
     #   python-etcd
 docker==6.1.3
-    # via -r requirements/static/ci/common.in
+    # via -r requirements/pytest.txt
 etcd3-py==0.1.6
     # via -r requirements/static/ci/common.in
 exceptiongroup==1.1.1

@@ -242,7 +242,7 @@ packaging==23.1
     #   -r requirements/base.txt
     #   docker
     #   pytest
-passlib[bcrypt]==1.7.4
+passlib==1.7.4
     # via -r requirements/static/ci/common.in
 patch==1.16
     # via -r requirements/static/ci/windows.in

@@ -311,7 +311,7 @@ pytest-helpers-namespace==2021.12.29
     #   pytest-shell-utilities
 pytest-httpserver==1.0.8
     # via -r requirements/pytest.txt
-pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32"
+pytest-salt-factories==1.0.0rc21
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.7.0
     # via pytest-salt-factories
@@ -39,6 +39,44 @@ warnings.filterwarnings(
 )


+def __getdefaultlocale(envvars=("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")):
+    """
+    This function was backported from Py3.11, which started triggering a
+    deprecation warning about its removal in 3.13.
+    """
+    import locale
+
+    try:
+        # check if it's supported by the _locale module
+        import _locale
+
+        code, encoding = _locale._getdefaultlocale()
+    except (ImportError, AttributeError):
+        pass
+    else:
+        # make sure the code/encoding values are valid
+        if sys.platform == "win32" and code and code[:2] == "0x":
+            # map windows language identifier to language name
+            code = locale.windows_locale.get(int(code, 0))
+        # ...add other platform-specific processing here, if
+        # necessary...
+        return code, encoding
+
+    # fall back on POSIX behaviour
+    import os
+
+    lookup = os.environ.get
+    for variable in envvars:
+        localename = lookup(variable, None)
+        if localename:
+            if variable == "LANGUAGE":
+                localename = localename.split(":")[0]
+            break
+    else:
+        localename = "C"
+    return locale._parse_localename(localename)
+
+
 def __define_global_system_encoding_variable__():
     import sys

@@ -57,17 +95,14 @@ def __define_global_system_encoding_variable__():
         # If the system is properly configured this should return a valid
         # encoding. MS Windows has problems with this and reports the wrong
         # encoding
-        import locale
-
         try:
-            encoding = locale.getdefaultlocale()[-1]
+            encoding = __getdefaultlocale()[-1]
         except ValueError:
             # A bad locale setting was most likely found:
             # https://github.com/saltstack/salt/issues/26063
             pass

-        # This is now garbage collectable
-        del locale
         if not encoding:
             # This is most likely ascii which is not the best but we were
             # unable to find a better encoding. If this fails, we fall all
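For context, and not part of the commit: on Python 3.11+ the non-deprecated stdlib route looks roughly like this, and avoids `getdefaultlocale()` entirely:

    import locale

    # getlocale() plus getpreferredencoding() are the supported replacements
    # for the deprecated locale.getdefaultlocale().
    lang, enc = locale.getlocale()
    preferred = locale.getpreferredencoding(False)
    print(lang, enc, preferred)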
@@ -747,7 +747,7 @@ class SlackClient:
         results = {}
         for jid in outstanding_jids:
             # results[jid] = runner.cmd('jobs.lookup_jid', [jid])
-            if self.master_minion.returners["{}.get_jid".format(source)](jid):
+            if self.master_minion.returners[f"{source}.get_jid"](jid):
                 job_result = runner.cmd("jobs.list_job", [jid])
                 jid_result = job_result.get("Result", {})
                 jid_function = job_result.get("Function", {})

@@ -838,7 +838,7 @@ class SlackClient:
         channel.send_message(return_prefix)
         ts = time.time()
         st = datetime.datetime.fromtimestamp(ts).strftime("%Y%m%d%H%M%S%f")
-        filename = "salt-results-{}.yaml".format(st)
+        filename = f"salt-results-{st}.yaml"
         r = self.sc.api_call(
             "files.upload",
             channels=channel.id,

@@ -928,7 +928,7 @@ def start(
     """

     salt.utils.versions.warn_until(
-        "Argon",
+        3008,
         "This 'slack' engine will be deprecated and "
         "will be replaced by the slack_bolt engine. This new "
         "engine will use the new Bolt library from Slack and requires "

@@ -947,4 +947,4 @@ def start(
         )
         client.run_commands_from_slack_async(message_generator, fire_all, tag, control)
     except Exception:  # pylint: disable=broad-except
-        raise Exception("{}".format(traceback.format_exc()))
+        raise Exception(f"{traceback.format_exc()}")
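Note that `warn_until` now takes the numeric release (3008) instead of the codename string, which is what the new `fix_warn_until` salt-rewrite rule enables. A hedged sketch of the usual pattern; `salt.utils.versions.warn_until` exists in Salt, but the surrounding function here is illustrative:

    import salt.utils.versions


    def old_entry_point():
        # Emits a DeprecationWarning until the 3008 release; once the running
        # Salt version reaches it, warn_until raises to force removal.
        salt.utils.versions.warn_until(
            3008,
            "old_entry_point is deprecated; use new_entry_point instead.",
        )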
@@ -11,7 +11,6 @@ as those returned here

 import datetime
 import hashlib
-import locale
 import logging
 import os
 import platform

@@ -33,6 +32,7 @@ import salt.modules.smbios
 import salt.utils.args
 import salt.utils.dns
 import salt.utils.files
+import salt.utils.locales
 import salt.utils.network
 import salt.utils.path
 import salt.utils.pkg.rpm
@@ -289,7 +289,7 @@ def _linux_gpu_data():

     devs = []
     try:
-        lspci_out = __salt__["cmd.run"]("{} -vmm".format(lspci))
+        lspci_out = __salt__["cmd.run"](f"{lspci} -vmm")

         cur_dev = {}
         error = False

@@ -363,7 +363,7 @@ def _netbsd_gpu_data():
     for line in pcictl_out.splitlines():
         for vendor in known_vendors:
             vendor_match = re.match(
-                r"[0-9:]+ ({}) (.+) \(VGA .+\)".format(vendor), line, re.IGNORECASE
+                rf"[0-9:]+ ({vendor}) (.+) \(VGA .+\)", line, re.IGNORECASE
             )
             if vendor_match:
                 gpus.append(

@@ -425,18 +425,18 @@ def _bsd_cpudata(osdata):
     if sysctl:
         cmds.update(
             {
-                "num_cpus": "{} -n hw.ncpu".format(sysctl),
-                "cpuarch": "{} -n hw.machine".format(sysctl),
-                "cpu_model": "{} -n hw.model".format(sysctl),
+                "num_cpus": f"{sysctl} -n hw.ncpu",
+                "cpuarch": f"{sysctl} -n hw.machine",
+                "cpu_model": f"{sysctl} -n hw.model",
             }
         )

     if arch and osdata["kernel"] == "OpenBSD":
-        cmds["cpuarch"] = "{} -s".format(arch)
+        cmds["cpuarch"] = f"{arch} -s"

     if osdata["kernel"] == "Darwin":
-        cmds["cpu_model"] = "{} -n machdep.cpu.brand_string".format(sysctl)
-        cmds["cpu_flags"] = "{} -n machdep.cpu.features".format(sysctl)
+        cmds["cpu_model"] = f"{sysctl} -n machdep.cpu.brand_string"
+        cmds["cpu_flags"] = f"{sysctl} -n machdep.cpu.features"

     grains = {k: __salt__["cmd.run"](v) for k, v in cmds.items()}
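These hunks are mechanical `str.format` to f-string conversions of the kind the salt-rewrite hook above automates. The two spellings are equivalent; a quick check:

    sysctl = "/sbin/sysctl"
    # f-strings interpolate at the point of definition; .format defers the
    # substitution to call time, but the resulting string is identical.
    assert f"{sysctl} -n hw.ncpu" == "{} -n hw.ncpu".format(sysctl)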
@@ -521,7 +521,7 @@ def _aix_cpudata():
     grains = {}
     cmd = salt.utils.path.which("prtconf")
     if cmd:
-        data = __salt__["cmd.run"]("{}".format(cmd)) + os.linesep
+        data = __salt__["cmd.run"](f"{cmd}") + os.linesep
         for dest, regstring in (
             ("cpuarch", r"(?im)^\s*Processor\s+Type:\s+(\S+)"),
             ("cpu_flags", r"(?im)^\s*Processor\s+Version:\s+(\S+)"),

@@ -567,9 +567,9 @@ def _osx_memdata():

     sysctl = salt.utils.path.which("sysctl")
     if sysctl:
-        mem = __salt__["cmd.run"]("{} -n hw.memsize".format(sysctl))
+        mem = __salt__["cmd.run"](f"{sysctl} -n hw.memsize")
         swap_total = (
-            __salt__["cmd.run"]("{} -n vm.swapusage".format(sysctl))
+            __salt__["cmd.run"](f"{sysctl} -n vm.swapusage")
             .split()[2]
             .replace(",", ".")
         )

@@ -594,20 +594,20 @@ def _bsd_memdata(osdata):

     sysctl = salt.utils.path.which("sysctl")
     if sysctl:
-        mem = __salt__["cmd.run"]("{} -n hw.physmem".format(sysctl))
+        mem = __salt__["cmd.run"](f"{sysctl} -n hw.physmem")
         if osdata["kernel"] == "NetBSD" and mem.startswith("-"):
-            mem = __salt__["cmd.run"]("{} -n hw.physmem64".format(sysctl))
+            mem = __salt__["cmd.run"](f"{sysctl} -n hw.physmem64")
         grains["mem_total"] = int(mem) // 1024 // 1024

         if osdata["kernel"] in ["OpenBSD", "NetBSD"]:
             swapctl = salt.utils.path.which("swapctl")
-            swap_data = __salt__["cmd.run"]("{} -sk".format(swapctl))
+            swap_data = __salt__["cmd.run"](f"{swapctl} -sk")
             if swap_data == "no swap devices configured":
                 swap_total = 0
             else:
                 swap_total = swap_data.split(" ")[1]
         else:
-            swap_total = __salt__["cmd.run"]("{} -n vm.swap_total".format(sysctl))
+            swap_total = __salt__["cmd.run"](f"{sysctl} -n vm.swap_total")
         grains["swap_total"] = int(swap_total) // 1024 // 1024
     return grains

@@ -625,7 +625,7 @@ def _sunos_memdata():
     grains["mem_total"] = int(comps[2].strip())

     swap_cmd = salt.utils.path.which("swap")
-    swap_data = __salt__["cmd.run"]("{} -s".format(swap_cmd)).split()
+    swap_data = __salt__["cmd.run"](f"{swap_cmd} -s").split()
     try:
         swap_avail = int(swap_data[-2][:-1])
         swap_used = int(swap_data[-4][:-1])

@@ -653,7 +653,7 @@ def _aix_memdata():

     swap_cmd = salt.utils.path.which("swap")
     if swap_cmd:
-        swap_data = __salt__["cmd.run"]("{} -s".format(swap_cmd)).split()
+        swap_data = __salt__["cmd.run"](f"{swap_cmd} -s").split()
         try:
             swap_total = (int(swap_data[-2]) + int(swap_data[-6])) * 4
         except ValueError:

@@ -706,7 +706,7 @@ def _aix_get_machine_id():
     grains = {}
     cmd = salt.utils.path.which("lsattr")
     if cmd:
-        data = __salt__["cmd.run"]("{} -El sys0".format(cmd)) + os.linesep
+        data = __salt__["cmd.run"](f"{cmd} -El sys0") + os.linesep
         uuid_regexes = [re.compile(r"(?im)^\s*os_uuid\s+(\S+)\s+(.*)")]
         for regex in uuid_regexes:
             res = regex.search(data)

@@ -1017,7 +1017,7 @@ def _virtual(osdata):
                 subtype_cmd = "{} -c current get -H -o value {}-role".format(
                     command, role
                 )
-                ret = __salt__["cmd.run"]("{}".format(subtype_cmd))
+                ret = __salt__["cmd.run"](f"{subtype_cmd}")
                 if ret == "true":
                     roles.append(role)
             if roles:

@@ -1163,14 +1163,14 @@ def _virtual(osdata):
     elif osdata["kernel"] == "FreeBSD":
         kenv = salt.utils.path.which("kenv")
         if kenv:
-            product = __salt__["cmd.run"]("{} smbios.system.product".format(kenv))
-            maker = __salt__["cmd.run"]("{} smbios.system.maker".format(kenv))
+            product = __salt__["cmd.run"](f"{kenv} smbios.system.product")
+            maker = __salt__["cmd.run"](f"{kenv} smbios.system.maker")
             if product.startswith("VMware"):
                 grains["virtual"] = "VMware"
             if product.startswith("VirtualBox"):
                 grains["virtual"] = "VirtualBox"
             if maker.startswith("Xen"):
-                grains["virtual_subtype"] = "{} {}".format(maker, product)
+                grains["virtual_subtype"] = f"{maker} {product}"
                 grains["virtual"] = "xen"
             if maker.startswith("Microsoft") and product.startswith("Virtual"):
                 grains["virtual"] = "VirtualPC"

@@ -1181,9 +1181,9 @@ def _virtual(osdata):
         if maker.startswith("Amazon EC2"):
             grains["virtual"] = "Nitro"
         if sysctl:
-            hv_vendor = __salt__["cmd.run"]("{} -n hw.hv_vendor".format(sysctl))
-            model = __salt__["cmd.run"]("{} -n hw.model".format(sysctl))
-            jail = __salt__["cmd.run"]("{} -n security.jail.jailed".format(sysctl))
+            hv_vendor = __salt__["cmd.run"](f"{sysctl} -n hw.hv_vendor")
+            model = __salt__["cmd.run"](f"{sysctl} -n hw.model")
+            jail = __salt__["cmd.run"](f"{sysctl} -n security.jail.jailed")
             if "bhyve" in hv_vendor:
                 grains["virtual"] = "bhyve"
             elif "QEMU Virtual CPU" in model:

@@ -1199,22 +1199,19 @@ def _virtual(osdata):
     elif osdata["kernel"] == "NetBSD":
         if sysctl:
             if "QEMU Virtual CPU" in __salt__["cmd.run"](
-                "{} -n machdep.cpu_brand".format(sysctl)
+                f"{sysctl} -n machdep.cpu_brand"
             ):
                 grains["virtual"] = "kvm"
             elif "invalid" not in __salt__["cmd.run"](
-                "{} -n machdep.xen.suspend".format(sysctl)
+                f"{sysctl} -n machdep.xen.suspend"
             ):
                 grains["virtual"] = "Xen PV DomU"
             elif "VMware" in __salt__["cmd.run"](
-                "{} -n machdep.dmi.system-vendor".format(sysctl)
+                f"{sysctl} -n machdep.dmi.system-vendor"
             ):
                 grains["virtual"] = "VMware"
             # NetBSD has Xen dom0 support
-            elif (
-                __salt__["cmd.run"]("{} -n machdep.idle-mechanism".format(sysctl))
-                == "xen"
-            ):
+            elif __salt__["cmd.run"](f"{sysctl} -n machdep.idle-mechanism") == "xen":
                 if os.path.isfile("/var/run/xenconsoled.pid"):
                     grains["virtual_subtype"] = "Xen Dom0"
     elif osdata["kernel"] == "SunOS":

@@ -1222,7 +1219,7 @@ def _virtual(osdata):
         # check the zonename here as fallback
         zonename = salt.utils.path.which("zonename")
         if zonename:
-            zone = __salt__["cmd.run"]("{}".format(zonename))
+            zone = __salt__["cmd.run"](f"{zonename}")
             if zone != "global":
                 grains["virtual"] = "zone"

@@ -1251,7 +1248,7 @@ def _virtual(osdata):
                 r".*Product Name: ([^\r\n]*).*", output, flags=re.DOTALL
             )
             if product:
-                grains["virtual_subtype"] = "Amazon EC2 ({})".format(product[1])
+                grains["virtual_subtype"] = f"Amazon EC2 ({product[1]})"
         elif re.match(r".*Version: [^\r\n]+\.amazon.*", output, flags=re.DOTALL):
             grains["virtual_subtype"] = "Amazon EC2"

@@ -1283,9 +1280,7 @@ def _virtual_hv(osdata):
     try:
         version = {}
         for fn in ("major", "minor", "extra"):
-            with salt.utils.files.fopen(
-                "/sys/hypervisor/version/{}".format(fn), "r"
-            ) as fhr:
+            with salt.utils.files.fopen(f"/sys/hypervisor/version/{fn}", "r") as fhr:
                 version[fn] = salt.utils.stringutils.to_unicode(fhr.read().strip())
         grains["virtual_hv_version"] = "{}.{}{}".format(
             version["major"], version["minor"], version["extra"]

@@ -1441,7 +1436,7 @@ def _windows_os_release_grain(caption, product_type):
             # ie: R2
             if re.match(r"^R\d+$", item):
                 release = item
-        os_release = "{}Server{}".format(version, release)
+        os_release = f"{version}Server{release}"
     else:
         for item in caption.split(" "):
             # If it's a number, decimal number, Thin or Vista, then it's the

@@ -1632,7 +1627,7 @@ def _linux_devicetree_platform_data():
         try:
             # /proc/device-tree should be used instead of /sys/firmware/devicetree/base
             # see https://github.com/torvalds/linux/blob/v5.13/Documentation/ABI/testing/sysfs-firmware-ofw#L14
-            loc = "/proc/device-tree/{}".format(path)
+            loc = f"/proc/device-tree/{path}"
             if os.path.isfile(loc):
                 with salt.utils.files.fopen(loc, mode="r") as f:
                     return f.read().rstrip("\x00")  # all strings are null-terminated

@@ -1871,18 +1866,13 @@ def _linux_bin_exists(binary):
     """
     for search_cmd in ("which", "type -ap"):
         try:
-            return __salt__["cmd.retcode"]("{} {}".format(search_cmd, binary)) == 0
+            return __salt__["cmd.retcode"](f"{search_cmd} {binary}") == 0
         except salt.exceptions.CommandExecutionError:
             pass

     try:
         return (
-            len(
-                __salt__["cmd.run_all"]("whereis -b {}".format(binary))[
-                    "stdout"
-                ].split()
-            )
-            > 1
+            len(__salt__["cmd.run_all"](f"whereis -b {binary}")["stdout"].split()) > 1
         )
     except salt.exceptions.CommandExecutionError:
         return False

@@ -1900,7 +1890,7 @@ def _parse_lsb_release():
             pass
         else:
             # Adds lsb_distrib_{id,release,codename,description}
-            ret["lsb_{}".format(key.lower())] = value.rstrip()
+            ret[f"lsb_{key.lower()}"] = value.rstrip()
     except OSError as exc:
         log.trace("Failed to parse /etc/lsb-release: %s", exc)
     return ret

@@ -2624,7 +2614,7 @@ def os_data():
         osbuild = __salt__["cmd.run"]("sw_vers -buildVersion")
         grains["os"] = "MacOS"
         grains["os_family"] = "MacOS"
-        grains["osfullname"] = "{} {}".format(osname, osrelease)
+        grains["osfullname"] = f"{osname} {osrelease}"
         grains["osrelease"] = osrelease
         grains["osbuild"] = osbuild
         grains["init"] = "launchd"

@@ -2698,7 +2688,7 @@ def locale_info():
         (
             grains["locale_info"]["defaultlanguage"],
             grains["locale_info"]["defaultencoding"],
-        ) = locale.getdefaultlocale()
+        ) = salt.utils.locales.getdefaultlocale()
     except Exception:  # pylint: disable=broad-except
         # locale.getdefaultlocale can ValueError!! Catch anything else it
         # might do, per #2205

@@ -3165,7 +3155,7 @@ def _hw_data(osdata):
             "productname": "DeviceDesc",
         }
         for grain_name, cmd_key in hwdata.items():
-            result = __salt__["cmd.run_all"]("fw_printenv {}".format(cmd_key))
+            result = __salt__["cmd.run_all"](f"fw_printenv {cmd_key}")
             if result["retcode"] == 0:
                 uboot_keyval = result["stdout"].split("=")
                 grains[grain_name] = _clean_value(grain_name, uboot_keyval[1])

@@ -3185,7 +3175,7 @@ def _hw_data(osdata):
             "uuid": "smbios.system.uuid",
         }
         for key, val in fbsd_hwdata.items():
-            value = __salt__["cmd.run"]("{} {}".format(kenv, val))
+            value = __salt__["cmd.run"](f"{kenv} {val}")
             grains[key] = _clean_value(key, value)
     elif osdata["kernel"] == "OpenBSD":
         sysctl = salt.utils.path.which("sysctl")

@@ -3197,7 +3187,7 @@ def _hw_data(osdata):
             "uuid": "hw.uuid",
         }
         for key, oid in hwdata.items():
-            value = __salt__["cmd.run"]("{} -n {}".format(sysctl, oid))
+            value = __salt__["cmd.run"](f"{sysctl} -n {oid}")
             if not value.endswith(" value is not available"):
                 grains[key] = _clean_value(key, value)
     elif osdata["kernel"] == "NetBSD":

@@ -3212,7 +3202,7 @@ def _hw_data(osdata):
             "uuid": "machdep.dmi.system-uuid",
         }
         for key, oid in nbsd_hwdata.items():
-            result = __salt__["cmd.run_all"]("{} -n {}".format(sysctl, oid))
+            result = __salt__["cmd.run_all"](f"{sysctl} -n {oid}")
             if result["retcode"] == 0:
                 grains[key] = _clean_value(key, result["stdout"])
     elif osdata["kernel"] == "Darwin":

@@ -3220,7 +3210,7 @@ def _hw_data(osdata):
         sysctl = salt.utils.path.which("sysctl")
         hwdata = {"productname": "hw.model"}
         for key, oid in hwdata.items():
-            value = __salt__["cmd.run"]("{} -b {}".format(sysctl, oid))
+            value = __salt__["cmd.run"](f"{sysctl} -b {oid}")
             if not value.endswith(" is invalid"):
                 grains[key] = _clean_value(key, value)
     elif osdata["kernel"] == "SunOS" and osdata["cpuarch"].startswith("sparc"):

@@ -3234,7 +3224,7 @@ def _hw_data(osdata):
             ("/usr/sbin/virtinfo", "-a"),
         ):
             if salt.utils.path.which(cmd):  # Also verifies that cmd is executable
-                data += __salt__["cmd.run"]("{} {}".format(cmd, args))
+                data += __salt__["cmd.run"](f"{cmd} {args}")
                 data += "\n"

         sn_regexes = [

@@ -3349,7 +3339,7 @@ def _hw_data(osdata):
     elif osdata["kernel"] == "AIX":
         cmd = salt.utils.path.which("prtconf")
         if cmd:
-            data = __salt__["cmd.run"]("{}".format(cmd)) + os.linesep
+            data = __salt__["cmd.run"](f"{cmd}") + os.linesep
             for dest, regstring in (
                 ("serialnumber", r"(?im)^\s*Machine\s+Serial\s+Number:\s+(\S+)"),
                 ("systemfirmware", r"(?im)^\s*Firmware\s+Version:\s+(.*)"),

@@ -3431,14 +3421,14 @@ def default_gateway():
             for line in out.splitlines():
                 if line.startswith("default"):
                     grains["ip_gw"] = True
-                    grains["ip{}_gw".format(ip_version)] = True
+                    grains[f"ip{ip_version}_gw"] = True
                     try:
                         via, gw_ip = line.split()[1:3]
                     except ValueError:
                         pass
                     else:
                         if via == "via":
-                            grains["ip{}_gw".format(ip_version)] = gw_ip
+                            grains[f"ip{ip_version}_gw"] = gw_ip
                     break
         except Exception:  # pylint: disable=broad-except
             continue
@ -8,13 +8,11 @@ These functions are not designed to be called directly, but instead from the
|
|||
:mod:`docker <salt.modules.docker>` execution modules. They provide for
|
||||
common logic to be re-used for common actions.
|
||||
"""
|
||||
|
||||
|
||||
import copy
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import pipes
|
||||
import shlex
|
||||
import time
|
||||
import traceback
|
||||
|
||||
|
@ -68,14 +66,14 @@ def _nsenter(pid):
|
|||
"""
|
||||
Return the nsenter command to attach to the named container
|
||||
"""
|
||||
return "nsenter --target {} --mount --uts --ipc --net --pid".format(pid)
|
||||
return f"nsenter --target {pid} --mount --uts --ipc --net --pid"
|
||||
|
||||
|
||||
def _get_md5(name, path, run_func):
|
||||
"""
|
||||
Get the MD5 checksum of a file from a container
|
||||
"""
|
||||
output = run_func(name, "md5sum {}".format(pipes.quote(path)), ignore_retcode=True)[
|
||||
output = run_func(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)[
|
||||
"stdout"
|
||||
]
|
||||
try:
|
||||
|
@ -102,10 +100,10 @@ def cache_file(source):
|
|||
if source.startswith("salt://"):
|
||||
cached_source = __salt__["cp.cache_file"](source)
|
||||
if not cached_source:
|
||||
raise CommandExecutionError("Unable to cache {}".format(source))
|
||||
raise CommandExecutionError(f"Unable to cache {source}")
|
||||
return cached_source
|
||||
except AttributeError:
|
||||
raise SaltInvocationError("Invalid source file {}".format(source))
|
||||
raise SaltInvocationError(f"Invalid source file {source}")
|
||||
return source
|
||||
|
||||
|
||||
|
@ -164,55 +162,47 @@ def run(
|
|||
if exec_driver == "lxc-attach":
|
||||
full_cmd = "lxc-attach "
|
||||
if path:
|
||||
full_cmd += "-P {} ".format(pipes.quote(path))
|
||||
full_cmd += f"-P {shlex.quote(path)} "
|
||||
if keep_env is not True:
|
||||
full_cmd += "--clear-env "
|
||||
if "PATH" not in to_keep:
|
||||
full_cmd += "--set-var {} ".format(PATH)
|
||||
full_cmd += f"--set-var {PATH} "
|
||||
# --clear-env results in a very restrictive PATH
|
||||
# (/bin:/usr/bin), use a good fallback.
|
||||
full_cmd += " ".join(
|
||||
[
|
||||
"--set-var {}={}".format(x, pipes.quote(os.environ[x]))
|
||||
f"--set-var {x}={shlex.quote(os.environ[x])}"
|
||||
for x in to_keep
|
||||
if x in os.environ
|
||||
]
|
||||
)
|
||||
full_cmd += " -n {} -- {}".format(pipes.quote(name), cmd)
|
||||
full_cmd += f" -n {shlex.quote(name)} -- {cmd}"
|
||||
elif exec_driver == "nsenter":
|
||||
pid = __salt__["{}.pid".format(container_type)](name)
|
||||
full_cmd = "nsenter --target {} --mount --uts --ipc --net --pid -- ".format(pid)
|
||||
pid = __salt__[f"{container_type}.pid"](name)
|
||||
full_cmd = f"nsenter --target {pid} --mount --uts --ipc --net --pid -- "
|
||||
if keep_env is not True:
|
||||
full_cmd += "env -i "
|
||||
if "PATH" not in to_keep:
|
||||
full_cmd += "{} ".format(PATH)
|
||||
full_cmd += f"{PATH} "
|
||||
full_cmd += " ".join(
|
||||
[
|
||||
"{}={}".format(x, pipes.quote(os.environ[x]))
|
||||
for x in to_keep
|
||||
if x in os.environ
|
||||
]
|
||||
[f"{x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ]
|
||||
)
|
||||
full_cmd += " {}".format(cmd)
|
||||
full_cmd += f" {cmd}"
|
||||
elif exec_driver == "docker-exec":
|
||||
# We're using docker exec on the CLI as opposed to via docker-py, since
|
||||
# the Docker API doesn't return stdout and stderr separately.
|
||||
full_cmd = "docker exec "
|
||||
if stdin:
|
||||
full_cmd += "-i "
|
||||
full_cmd += "{} ".format(name)
|
||||
full_cmd += f"{name} "
|
||||
if keep_env is not True:
|
||||
full_cmd += "env -i "
|
||||
if "PATH" not in to_keep:
|
||||
full_cmd += "{} ".format(PATH)
|
||||
full_cmd += f"{PATH} "
|
||||
full_cmd += " ".join(
|
||||
[
|
||||
"{}={}".format(x, pipes.quote(os.environ[x]))
|
||||
for x in to_keep
|
||||
if x in os.environ
|
||||
]
|
||||
[f"{x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ]
|
||||
)
|
||||
full_cmd += " {}".format(cmd)
|
||||
full_cmd += f" {cmd}"
|
||||
|
||||
if not use_vt:
|
||||
ret = __salt__[cmd_func](
|
||||
|
@ -299,13 +289,13 @@ def copy_to(
|
|||
salt myminion container_resource.copy_to mycontainer /local/file/path /container/file/path container_type=docker exec_driver=nsenter
|
||||
"""
|
||||
# Get the appropriate functions
|
||||
state = __salt__["{}.state".format(container_type)]
|
||||
state = __salt__[f"{container_type}.state"]
|
||||
|
||||
def run_all(*args, **akwargs):
|
||||
akwargs = copy.deepcopy(akwargs)
|
||||
if container_type in ["lxc"] and "path" not in akwargs:
|
||||
akwargs["path"] = path
|
||||
return __salt__["{}.run_all".format(container_type)](*args, **akwargs)
|
||||
return __salt__[f"{container_type}.run_all"](*args, **akwargs)
|
||||
|
||||
state_kwargs = {}
|
||||
cmd_kwargs = {"ignore_retcode": True}
|
||||
|
@ -321,7 +311,7 @@ def copy_to(
|
|||
|
||||
c_state = _state(name)
|
||||
if c_state != "running":
|
||||
raise CommandExecutionError("Container '{}' is not running".format(name))
|
||||
raise CommandExecutionError(f"Container '{name}' is not running")
|
||||
|
||||
local_file = cache_file(source)
|
||||
source_dir, source_name = os.path.split(local_file)
|
||||
|
@ -330,17 +320,14 @@ def copy_to(
|
|||
if not os.path.isabs(local_file):
|
||||
raise SaltInvocationError("Source path must be absolute")
|
||||
elif not os.path.exists(local_file):
|
||||
raise SaltInvocationError("Source file {} does not exist".format(local_file))
|
||||
raise SaltInvocationError(f"Source file {local_file} does not exist")
|
||||
elif not os.path.isfile(local_file):
|
||||
raise SaltInvocationError("Source must be a regular file")
|
||||
|
||||
# Destination file sanity checks
|
||||
if not os.path.isabs(dest):
|
||||
raise SaltInvocationError("Destination path must be absolute")
|
||||
if (
|
||||
run_all(name, "test -d {}".format(pipes.quote(dest)), **cmd_kwargs)["retcode"]
|
||||
== 0
|
||||
):
|
||||
if run_all(name, f"test -d {shlex.quote(dest)}", **cmd_kwargs)["retcode"] == 0:
|
||||
# Destination is a directory, full path to dest file will include the
|
||||
# basename of the source file.
|
||||
dest = os.path.join(dest, source_name)
|
||||
|
@ -350,14 +337,12 @@ def copy_to(
|
|||
# parent directory.
|
||||
dest_dir, dest_name = os.path.split(dest)
|
||||
if (
|
||||
run_all(name, "test -d {}".format(pipes.quote(dest_dir)), **cmd_kwargs)[
|
||||
"retcode"
|
||||
]
|
||||
run_all(name, f"test -d {shlex.quote(dest_dir)}", **cmd_kwargs)["retcode"]
|
||||
!= 0
|
||||
):
|
||||
if makedirs:
|
||||
result = run_all(
|
||||
name, "mkdir -p {}".format(pipes.quote(dest_dir)), **cmd_kwargs
|
||||
name, f"mkdir -p {shlex.quote(dest_dir)}", **cmd_kwargs
|
||||
)
|
||||
if result["retcode"] != 0:
|
||||
error = (
|
||||
|
@ -375,10 +360,7 @@ def copy_to(
|
|||
)
|
||||
if (
|
||||
not overwrite
|
||||
and run_all(name, "test -e {}".format(pipes.quote(dest)), **cmd_kwargs)[
|
||||
"retcode"
|
||||
]
|
||||
== 0
|
||||
and run_all(name, f"test -e {shlex.quote(dest)}", **cmd_kwargs)["retcode"] == 0
|
||||
):
|
||||
raise CommandExecutionError(
|
||||
"Destination path {} already exists. Use overwrite=True to "
|
||||
|
@ -401,14 +383,14 @@ def copy_to(
|
|||
if exec_driver == "lxc-attach":
|
||||
lxcattach = "lxc-attach"
|
||||
if path:
|
||||
lxcattach += " -P {}".format(pipes.quote(path))
|
||||
lxcattach += f" -P {shlex.quote(path)}"
|
||||
copy_cmd = (
|
||||
'cat "{0}" | {4} --clear-env --set-var {1} -n {2} -- tee "{3}"'.format(
|
||||
local_file, PATH, name, dest, lxcattach
|
||||
)
|
||||
)
|
||||
elif exec_driver == "nsenter":
|
||||
pid = __salt__["{}.pid".format(container_type)](name)
|
||||
pid = __salt__[f"{container_type}.pid"](name)
|
||||
copy_cmd = 'cat "{}" | {} env -i {} tee "{}"'.format(
|
||||
local_file, _nsenter(pid), PATH, dest
|
||||
)
|
||||
|
|
|
@ -2,10 +2,8 @@
|
|||
Module to provide Postgres compatibility to salt for debian family specific tools.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
import pipes
|
||||
import shlex
|
||||
|
||||
import salt.utils.path
|
||||
|
||||
|
@ -76,7 +74,7 @@ def cluster_create(
|
|||
cmd += ["--data-checksums"]
|
||||
if wal_segsize:
|
||||
cmd += ["--wal-segsize", wal_segsize]
|
||||
cmdstr = " ".join([pipes.quote(c) for c in cmd])
|
||||
cmdstr = " ".join([shlex.quote(c) for c in cmd])
|
||||
ret = __salt__["cmd.run_all"](cmdstr, python_shell=False)
|
||||
if ret.get("retcode", 0) != 0:
|
||||
log.error("Error creating a Postgresql cluster %s/%s", version, name)
|
||||
|
@ -97,7 +95,7 @@ def cluster_list(verbose=False):
|
|||
salt '*' postgres.cluster_list verbose=True
|
||||
"""
|
||||
cmd = [salt.utils.path.which("pg_lsclusters"), "--no-header"]
|
||||
ret = __salt__["cmd.run_all"](" ".join([pipes.quote(c) for c in cmd]))
|
||||
ret = __salt__["cmd.run_all"](" ".join([shlex.quote(c) for c in cmd]))
|
||||
if ret.get("retcode", 0) != 0:
|
||||
log.error("Error listing clusters")
|
||||
cluster_dict = _parse_pg_lscluster(ret["stdout"])
|
||||
|
@ -118,7 +116,7 @@ def cluster_exists(version, name="main"):
|
|||
|
||||
salt '*' postgres.cluster_exists '9.3' 'main'
|
||||
"""
|
||||
return "{}/{}".format(version, name) in cluster_list()
|
||||
return f"{version}/{name}" in cluster_list()
|
||||
|
||||
|
||||
def cluster_remove(version, name="main", stop=False):
|
||||
|
@ -141,13 +139,13 @@ def cluster_remove(version, name="main", stop=False):
|
|||
if stop:
|
||||
cmd += ["--stop"]
|
||||
cmd += [str(version), name]
|
||||
cmdstr = " ".join([pipes.quote(c) for c in cmd])
|
||||
cmdstr = " ".join([shlex.quote(c) for c in cmd])
|
||||
ret = __salt__["cmd.run_all"](cmdstr, python_shell=False)
|
||||
# FIXME - return Boolean ?
|
||||
if ret.get("retcode", 0) != 0:
|
||||
log.error("Error removing a Postgresql cluster %s/%s", version, name)
|
||||
else:
|
||||
ret["changes"] = "Successfully removed cluster {}/{}".format(version, name)
|
||||
ret["changes"] = f"Successfully removed cluster {version}/{name}"
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -158,7 +156,7 @@ def _parse_pg_lscluster(output):
|
|||
cluster_dict = {}
|
||||
for line in output.splitlines():
|
||||
version, name, port, status, user, datadir, log = line.split()
|
||||
cluster_dict["{}/{}".format(version, name)] = {
|
||||
cluster_dict[f"{version}/{name}"] = {
|
||||
"port": int(port),
|
||||
"status": status,
|
||||
"user": user,
|
||||
|
|
|
@ -204,8 +204,8 @@ import gzip
|
|||
import json
|
||||
import logging
|
||||
import os
|
||||
import pipes
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import string
|
||||
import subprocess
|
||||
|
@ -252,7 +252,6 @@ except ImportError:
|
|||
|
||||
HAS_NSENTER = bool(salt.utils.path.which("nsenter"))
|
||||
|
||||
# Set up logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Don't shadow built-in's.
|
||||
|
@ -392,7 +391,7 @@ def _get_client(timeout=NOTSET, **kwargs):
|
|||
)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise CommandExecutionError(
|
||||
"Docker machine {} failed: {}".format(docker_machine, exc)
|
||||
f"Docker machine {docker_machine} failed: {exc}"
|
||||
)
|
||||
try:
|
||||
# docker-py 2.0 renamed this client attribute
|
||||
|
@ -492,7 +491,7 @@ def _change_state(name, action, expected, *args, **kwargs):
|
|||
return {
|
||||
"result": False,
|
||||
"state": {"old": expected, "new": expected},
|
||||
"comment": "Container '{}' already {}".format(name, expected),
|
||||
"comment": f"Container '{name}' already {expected}",
|
||||
}
|
||||
_client_wrapper(action, name, *args, **kwargs)
|
||||
_clear_context()
|
||||
|
@ -530,9 +529,7 @@ def _get_md5(name, path):
|
|||
"""
|
||||
Get the MD5 checksum of a file from a container
|
||||
"""
|
||||
output = run_stdout(
|
||||
name, "md5sum {}".format(pipes.quote(path)), ignore_retcode=True
|
||||
)
|
||||
output = run_stdout(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)
|
||||
try:
|
||||
return output.split()[0]
|
||||
except IndexError:
|
||||
|
@ -611,7 +608,7 @@ def _scrub_links(links, name):
|
|||
if isinstance(links, list):
|
||||
ret = []
|
||||
for l in links:
|
||||
ret.append(l.replace("/{}/".format(name), "/", 1))
|
||||
ret.append(l.replace(f"/{name}/", "/", 1))
|
||||
else:
|
||||
ret = links
|
||||
|
||||
|
@ -634,11 +631,11 @@ def _size_fmt(num):
|
|||
try:
|
||||
num = int(num)
|
||||
if num < 1024:
|
||||
return "{} bytes".format(num)
|
||||
return f"{num} bytes"
|
||||
num /= 1024.0
|
||||
for unit in ("KiB", "MiB", "GiB", "TiB", "PiB"):
|
||||
if num < 1024.0:
|
||||
return "{:3.1f} {}".format(num, unit)
|
||||
return f"{num:3.1f} {unit}"
|
||||
num /= 1024.0
|
||||
except Exception: # pylint: disable=broad-except
|
||||
log.error("Unable to format file size for '%s'", num)
|
||||
|
@ -653,7 +650,7 @@ def _client_wrapper(attr, *args, **kwargs):
|
|||
catch_api_errors = kwargs.pop("catch_api_errors", True)
|
||||
func = getattr(__context__["docker.client"], attr, None)
|
||||
if func is None or not hasattr(func, "__call__"):
|
||||
raise SaltInvocationError("Invalid client action '{}'".format(attr))
|
||||
raise SaltInvocationError(f"Invalid client action '{attr}'")
|
||||
if attr in ("push", "pull"):
|
||||
try:
|
||||
# Refresh auth config from config.json
|
||||
|
@ -673,7 +670,7 @@ def _client_wrapper(attr, *args, **kwargs):
|
|||
if catch_api_errors:
|
||||
# Generic handling of Docker API errors
|
||||
raise CommandExecutionError(
|
||||
"Error {}: {}".format(exc.response.status_code, exc.explanation)
|
||||
f"Error {exc.response.status_code}: {exc.explanation}"
|
||||
)
|
||||
else:
|
||||
# Allow API errors to be caught further up the stack
|
||||
|
@ -688,9 +685,9 @@ def _client_wrapper(attr, *args, **kwargs):
|
|||
|
||||
# If we're here, it's because an exception was caught earlier, and the
|
||||
# API command failed.
|
||||
msg = "Unable to perform {}".format(attr)
|
||||
msg = f"Unable to perform {attr}"
|
||||
if err:
|
||||
msg += ": {}".format(err)
|
||||
msg += f": {err}"
|
||||
raise CommandExecutionError(msg)
|
||||
|
||||
|
||||
|
@ -717,7 +714,7 @@ def _import_status(data, item, repo_name, repo_tag):
|
|||
return
|
||||
elif all(x in string.hexdigits for x in status):
|
||||
# Status is an image ID
|
||||
data["Image"] = "{}:{}".format(repo_name, repo_tag)
|
||||
data["Image"] = f"{repo_name}:{repo_tag}"
|
||||
data["Id"] = status
|
||||
except (AttributeError, TypeError):
|
||||
pass
|
||||
|
@ -876,7 +873,7 @@ def _get_create_kwargs(
|
|||
ignore_collisions=False,
|
||||
validate_ip_addrs=True,
|
||||
client_args=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Take input kwargs and return a kwargs dict to pass to docker-py's
|
||||
|
@ -894,7 +891,7 @@ def _get_create_kwargs(
|
|||
skip_translate=skip_translate,
|
||||
ignore_collisions=ignore_collisions,
|
||||
validate_ip_addrs=validate_ip_addrs,
|
||||
**__utils__["args.clean_kwargs"](**kwargs)
|
||||
**__utils__["args.clean_kwargs"](**kwargs),
|
||||
)
|
||||
|
||||
if networks:
|
||||
|
@ -907,7 +904,7 @@ def _get_create_kwargs(
|
|||
log.error(
|
||||
"docker.create: Error getting client args: '%s'", exc, exc_info=True
|
||||
)
|
||||
raise CommandExecutionError("Failed to get client args: {}".format(exc))
|
||||
raise CommandExecutionError(f"Failed to get client args: {exc}")
|
||||
|
||||
full_host_config = {}
|
||||
host_kwargs = {}
|
||||
|
@ -1468,15 +1465,15 @@ def login(*registries):
|
|||
results = ret.setdefault("Results", {})
|
||||
for registry in registries:
|
||||
if registry not in registry_auth:
|
||||
errors.append("No match found for registry '{}'".format(registry))
|
||||
errors.append(f"No match found for registry '{registry}'")
|
||||
continue
|
||||
try:
|
||||
username = registry_auth[registry]["username"]
|
||||
password = registry_auth[registry]["password"]
|
||||
except TypeError:
|
||||
errors.append("Invalid configuration for registry '{}'".format(registry))
|
||||
errors.append(f"Invalid configuration for registry '{registry}'")
|
||||
except KeyError as exc:
|
||||
errors.append("Missing {} for registry '{}'".format(exc, registry))
|
||||
errors.append(f"Missing {exc} for registry '{registry}'")
|
||||
else:
|
||||
cmd = ["docker", "login", "-u", username, "-p", password]
|
||||
if registry.lower() != "hub":
|
||||
|
@ -1562,7 +1559,7 @@ def logout(*registries):
|
|||
results = ret.setdefault("Results", {})
|
||||
for registry in registries:
|
||||
if registry not in registry_auth:
|
||||
errors.append("No match found for registry '{}'".format(registry))
|
||||
errors.append(f"No match found for registry '{registry}'")
|
||||
continue
|
||||
else:
|
||||
cmd = ["docker", "logout"]
|
||||
|
@ -1684,7 +1681,7 @@ def exists(name):
|
|||
|
||||
salt myminion docker.exists mycontainer
|
||||
"""
|
||||
contextkey = "docker.exists.{}".format(name)
|
||||
contextkey = f"docker.exists.{name}"
|
||||
if contextkey in __context__:
|
||||
return __context__[contextkey]
|
||||
try:
|
||||
|
@ -1775,7 +1772,7 @@ def history(name, quiet=False):
|
|||
)
|
||||
for param in ("Size",):
|
||||
if param in step:
|
||||
step["{}_Human".format(param)] = _size_fmt(step[param])
|
||||
step[f"{param}_Human"] = _size_fmt(step[param])
|
||||
ret.append(copy.deepcopy(step))
|
||||
if quiet:
|
||||
return [x.get("Command") for x in ret]
|
||||
|
@ -1837,9 +1834,7 @@ def images(verbose=False, **kwargs):
|
|||
)
|
||||
for param in ("Size", "VirtualSize"):
|
||||
if param in bucket.get(img_id, {}):
|
||||
bucket[img_id]["{}_Human".format(param)] = _size_fmt(
|
||||
bucket[img_id][param]
|
||||
)
|
||||
bucket[img_id][f"{param}_Human"] = _size_fmt(bucket[img_id][param])
|
||||
|
||||
context_data = __context__.get("docker.images", {})
|
||||
ret = copy.deepcopy(context_data.get("tagged", {}))
|
||||
|
@ -1922,7 +1917,7 @@ def inspect(name):
|
|||
raise
|
||||
|
||||
raise CommandExecutionError(
|
||||
"Error 404: No such image/container/volume/network: {}".format(name)
|
||||
f"Error 404: No such image/container/volume/network: {name}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -1978,7 +1973,7 @@ def inspect_image(name):
|
|||
ret = _client_wrapper("inspect_image", name)
|
||||
for param in ("Size", "VirtualSize"):
|
||||
if param in ret:
|
||||
ret["{}_Human".format(param)] = _size_fmt(ret[param])
|
||||
ret[f"{param}_Human"] = _size_fmt(ret[param])
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -2272,7 +2267,7 @@ def port(name, private_port=None):
|
|||
else:
|
||||
# Sanity checks
|
||||
if isinstance(private_port, int):
|
||||
pattern = "{}/*".format(private_port)
|
||||
pattern = f"{private_port}/*"
|
||||
else:
|
||||
err = (
|
||||
"Invalid private_port '{}'. Must either be a port number, "
|
||||
|
@ -2393,7 +2388,7 @@ def state(name):
|
|||
|
||||
salt myminion docker.state mycontainer
|
||||
"""
|
||||
contextkey = "docker.state.{}".format(name)
|
||||
contextkey = f"docker.state.{name}"
|
||||
if contextkey in __context__:
|
||||
return __context__[contextkey]
|
||||
__context__[contextkey] = _get_state(inspect_container(name))
|
||||
|
@ -2433,9 +2428,7 @@ def search(name, official=False, trusted=False):
|
|||
"""
|
||||
response = _client_wrapper("search", name)
|
||||
if not response:
|
||||
raise CommandExecutionError(
|
||||
"No images matched the search string '{}'".format(name)
|
||||
)
|
||||
raise CommandExecutionError(f"No images matched the search string '{name}'")
|
||||
|
||||
key_map = {
|
||||
"description": "Description",
|
||||
|
@ -2550,7 +2543,7 @@ def create(
|
|||
ignore_collisions=False,
|
||||
validate_ip_addrs=True,
|
||||
client_timeout=salt.utils.dockermod.CLIENT_TIMEOUT,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Create a new container
|
||||
|
@ -3276,7 +3269,7 @@ def create(
|
|||
skip_translate=skip_translate,
|
||||
ignore_collisions=ignore_collisions,
|
||||
validate_ip_addrs=validate_ip_addrs,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
if unused_kwargs:
|
||||
|
@ -3288,7 +3281,7 @@ def create(
|
|||
|
||||
log.debug(
|
||||
"docker.create: creating container %susing the following arguments: %s",
|
||||
"with name '{}' ".format(name) if name is not None else "",
|
||||
f"with name '{name}' " if name is not None else "",
|
||||
kwargs,
|
||||
)
|
||||
time_started = time.time()
|
||||
|
@ -3326,7 +3319,7 @@ def run_container(
|
|||
replace=False,
|
||||
force=False,
|
||||
networks=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
.. versionadded:: 2018.3.0
|
||||
|
@ -3428,7 +3421,7 @@ def run_container(
|
|||
skip_translate=skip_translate,
|
||||
ignore_collisions=ignore_collisions,
|
||||
validate_ip_addrs=validate_ip_addrs,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
# _get_create_kwargs() will have processed auto_remove and put it into the
|
||||
|
@ -3453,7 +3446,7 @@ def run_container(
|
|||
|
||||
log.debug(
|
||||
"docker.create: creating container %susing the following arguments: %s",
|
||||
"with name '{}' ".format(name) if name is not None else "",
|
||||
f"with name '{name}' " if name is not None else "",
|
||||
kwargs,
|
||||
)
|
||||
|
||||
|
@ -3493,7 +3486,7 @@ def run_container(
|
|||
rm_(name)
|
||||
except CommandExecutionError as rm_exc:
|
||||
exc_info.setdefault("other_errors", []).append(
|
||||
"Failed to auto_remove container: {}".format(rm_exc)
|
||||
f"Failed to auto_remove container: {rm_exc}"
|
||||
)
|
||||
# Raise original exception with additional info
|
||||
raise CommandExecutionError(exc.__str__(), info=exc_info)
|
||||
|
@ -3588,7 +3581,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
|
|||
"""
|
||||
c_state = state(name)
|
||||
if c_state != "running":
|
||||
raise CommandExecutionError("Container '{}' is not running".format(name))
|
||||
raise CommandExecutionError(f"Container '{name}' is not running")
|
||||
|
||||
# Destination file sanity checks
|
||||
if not os.path.isabs(dest):
|
||||
|
@ -3614,9 +3607,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
|
|||
)
|
||||
)
|
||||
else:
|
||||
raise SaltInvocationError(
|
||||
"Directory {} does not exist".format(dest_dir)
|
||||
)
|
||||
raise SaltInvocationError(f"Directory {dest_dir} does not exist")
|
||||
if not overwrite and os.path.exists(dest):
|
||||
raise CommandExecutionError(
|
||||
"Destination path {} already exists. Use overwrite=True to "
|
||||
|
@ -3627,19 +3618,14 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
|
|||
if not os.path.isabs(source):
|
||||
raise SaltInvocationError("Source path must be absolute")
|
||||
else:
|
||||
if (
|
||||
retcode(name, "test -e {}".format(pipes.quote(source)), ignore_retcode=True)
|
||||
== 0
|
||||
):
|
||||
if retcode(name, f"test -e {shlex.quote(source)}", ignore_retcode=True) == 0:
|
||||
if (
|
||||
retcode(
|
||||
name, "test -f {}".format(pipes.quote(source)), ignore_retcode=True
|
||||
)
|
||||
retcode(name, f"test -f {shlex.quote(source)}", ignore_retcode=True)
|
||||
!= 0
|
||||
):
|
||||
raise SaltInvocationError("Source must be a regular file")
|
||||
else:
|
||||
raise SaltInvocationError("Source file {} does not exist".format(source))
|
||||
raise SaltInvocationError(f"Source file {source} does not exist")
|
||||
|
||||
# Before we try to replace the file, compare checksums.
|
||||
source_md5 = _get_md5(name, source)
|
||||
|
@ -3652,7 +3638,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
|
|||
try:
|
||||
src_path = ":".join((name, source))
|
||||
except TypeError:
|
||||
src_path = "{}:{}".format(name, source)
|
||||
src_path = f"{name}:{source}"
|
||||
cmd = ["docker", "cp", src_path, dest_dir]
|
||||
__salt__["cmd.run"](cmd, python_shell=False)
|
||||
return source_md5 == __salt__["file.get_sum"](dest, "md5")
|
||||
|
@ -3779,7 +3765,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
|
|||
salt myminion docker.export mycontainer /tmp/mycontainer.tar
|
||||
salt myminion docker.export mycontainer /tmp/mycontainer.tar.xz push=True
|
||||
"""
|
||||
err = "Path '{}' is not absolute".format(path)
|
||||
err = f"Path '{path}' is not absolute"
|
||||
try:
|
||||
if not os.path.isabs(path):
|
||||
raise SaltInvocationError(err)
|
||||
|
@ -3787,7 +3773,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
|
|||
raise SaltInvocationError(err)
|
||||
|
||||
if os.path.exists(path) and not overwrite:
|
||||
raise CommandExecutionError("{} already exists".format(path))
|
||||
raise CommandExecutionError(f"{path} already exists")
|
||||
|
||||
if compression is None:
|
||||
if path.endswith(".tar.gz") or path.endswith(".tgz"):
|
||||
|
@ -3810,7 +3796,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
|
|||
compression = "xz"
|
||||
|
||||
if compression and compression not in ("gzip", "bzip2", "xz"):
|
||||
raise SaltInvocationError("Invalid compression type '{}'".format(compression))
|
||||
raise SaltInvocationError(f"Invalid compression type '{compression}'")
|
||||
|
||||
parent_dir = os.path.dirname(path)
|
||||
if not os.path.isdir(parent_dir):
|
||||
|
@ -3823,16 +3809,14 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
|
|||
os.makedirs(parent_dir)
|
||||
except OSError as exc:
|
||||
raise CommandExecutionError(
|
||||
"Unable to make parent dir {}: {}".format(parent_dir, exc)
|
||||
f"Unable to make parent dir {parent_dir}: {exc}"
|
||||
)
|
||||
|
||||
if compression == "gzip":
|
||||
try:
|
||||
out = gzip.open(path, "wb")
|
||||
except OSError as exc:
|
||||
raise CommandExecutionError(
|
||||
"Unable to open {} for writing: {}".format(path, exc)
|
||||
)
|
||||
raise CommandExecutionError(f"Unable to open {path} for writing: {exc}")
|
||||
elif compression == "bzip2":
|
||||
compressor = bz2.BZ2Compressor()
|
||||
elif compression == "xz":
|
||||
|
@ -3870,9 +3854,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
|
|||
os.remove(path)
|
||||
except OSError:
|
||||
pass
|
||||
raise CommandExecutionError(
|
||||
"Error occurred during container export: {}".format(exc)
|
||||
)
|
||||
raise CommandExecutionError(f"Error occurred during container export: {exc}")
|
||||
finally:
|
||||
out.close()
|
||||
ret = {"Time_Elapsed": time.time() - time_started}
|
||||
|
@ -4103,7 +4085,7 @@ def build(
|
|||
# For the build function in the low-level API, the "tag" refers to the full
|
||||
# tag (e.g. myuser/myimage:mytag). This is different than in other
|
||||
# functions, where the repo and tag are passed separately.
|
||||
image_tag = "{}:{}".format(repository, tag) if repository and tag else None
|
||||
image_tag = f"{repository}:{tag}" if repository and tag else None
|
||||
|
||||
time_started = time.time()
|
||||
response = _client_wrapper(
|
||||
|
@ -4122,7 +4104,7 @@ def build(
|
|||
|
||||
if not response:
|
||||
raise CommandExecutionError(
|
||||
"Build failed for {}, no response returned from Docker API".format(path)
|
||||
f"Build failed for {path}, no response returned from Docker API"
|
||||
)
|
||||
|
||||
stream_data = []
|
||||
|
@ -4145,7 +4127,7 @@ def build(
|
|||
if "Id" not in ret:
|
||||
# API returned information, but there was no confirmation of a
|
||||
# successful build.
|
||||
msg = "Build failed for {}".format(path)
|
||||
msg = f"Build failed for {path}"
|
||||
log.error(msg)
|
||||
log.error(stream_data)
|
||||
if errors:
|
||||
|
@ -4156,7 +4138,7 @@ def build(
|
|||
if resolved_tag:
|
||||
ret["Image"] = resolved_tag
|
||||
else:
|
||||
ret["Warning"] = "Failed to tag image as {}".format(image_tag)
|
||||
ret["Warning"] = f"Failed to tag image as {image_tag}"
|
||||
|
||||
if api_response:
|
||||
ret["API_Response"] = stream_data
|
||||
|
@ -4363,7 +4345,7 @@ def import_(source, repository, tag="latest", api_response=False):
|
|||
|
||||
if not response:
|
||||
raise CommandExecutionError(
|
||||
"Import failed for {}, no response returned from Docker API".format(source)
|
||||
f"Import failed for {source}, no response returned from Docker API"
|
||||
)
|
||||
elif api_response:
|
||||
ret["API_Response"] = response
|
||||
|
@ -4383,7 +4365,7 @@ def import_(source, repository, tag="latest", api_response=False):
|
|||
if "Id" not in ret:
|
||||
# API returned information, but there was no confirmation of a
|
||||
# successful push.
|
||||
msg = "Import failed for {}".format(source)
|
||||
msg = f"Import failed for {source}"
|
||||
if errors:
|
||||
msg += ". Error(s) follow:\n\n{}".format("\n\n".join(errors))
|
||||
raise CommandExecutionError(msg)
|
||||
|
@ -4458,7 +4440,7 @@ def load(path, repository=None, tag=None):
|
|||
|
||||
local_path = __salt__["container_resource.cache_file"](path)
|
||||
if not os.path.isfile(local_path):
|
||||
raise CommandExecutionError("Source file {} does not exist".format(path))
|
||||
raise CommandExecutionError(f"Source file {path} does not exist")
|
||||
|
||||
pre = images(all=True)
|
||||
cmd = ["docker", "load", "-i", local_path]
|
||||
|
@ -4468,7 +4450,7 @@ def load(path, repository=None, tag=None):
|
|||
_clear_context()
|
||||
post = images(all=True)
|
||||
if result["retcode"] != 0:
|
||||
msg = "Failed to load image(s) from {}".format(path)
|
||||
msg = f"Failed to load image(s) from {path}"
|
||||
if result["stderr"]:
|
||||
msg += ": {}".format(result["stderr"])
|
||||
raise CommandExecutionError(msg)
|
||||
|
@ -4489,7 +4471,7 @@ def load(path, repository=None, tag=None):
|
|||
# strings when passed (e.g. a numeric tag would be loaded as an int
|
||||
# or float), and because the tag_ function will stringify them if
|
||||
# need be, a str.format is the correct thing to do here.
|
||||
tagged_image = "{}:{}".format(repository, tag)
|
||||
tagged_image = f"{repository}:{tag}"
|
||||
try:
|
||||
result = tag_(top_level_images[0], repository=repository, tag=tag)
|
||||
ret["Image"] = tagged_image
|
||||
|
@ -4526,7 +4508,7 @@ def layers(name):
|
|||
):
|
||||
ret.append(line)
|
||||
if not ret:
|
||||
raise CommandExecutionError("Image '{}' not found".format(name))
|
||||
raise CommandExecutionError(f"Image '{name}' not found")
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -4597,7 +4579,7 @@ def pull(
|
|||
|
||||
if not response:
|
||||
raise CommandExecutionError(
|
||||
"Pull failed for {}, no response returned from Docker API".format(image)
|
||||
f"Pull failed for {image}, no response returned from Docker API"
|
||||
)
|
||||
elif api_response:
|
||||
ret["API_Response"] = response
|
||||
|
@ -4610,7 +4592,7 @@ def pull(
|
|||
event = salt.utils.json.loads(event)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise CommandExecutionError(
|
||||
"Unable to interpret API event: '{}'".format(event),
|
||||
f"Unable to interpret API event: '{event}'",
|
||||
info={"Error": exc.__str__()},
|
||||
)
|
||||
try:
|
||||
|
@ -4692,7 +4674,7 @@ def push(
|
|||
|
||||
if not response:
|
||||
raise CommandExecutionError(
|
||||
"Push failed for {}, no response returned from Docker API".format(image)
|
||||
f"Push failed for {image}, no response returned from Docker API"
|
||||
)
|
||||
elif api_response:
|
||||
ret["API_Response"] = response
|
||||
|
@ -4704,7 +4686,7 @@ def push(
|
|||
event = salt.utils.json.loads(event)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise CommandExecutionError(
|
||||
"Unable to interpret API event: '{}'".format(event),
|
||||
f"Unable to interpret API event: '{event}'",
|
||||
info={"Error": exc.__str__()},
|
||||
)
|
||||
try:
|
||||
|
@ -4784,9 +4766,7 @@ def rmi(*names, **kwargs):
|
|||
err += "image(s): {}".format(", ".join(deps["Images"]))
|
||||
errors.append(err)
|
||||
else:
|
||||
errors.append(
|
||||
"Error {}: {}".format(exc.response.status_code, exc.explanation)
|
||||
)
|
||||
errors.append(f"Error {exc.response.status_code}: {exc.explanation}")
|
||||
|
||||
_clear_context()
|
||||
ret = {
|
||||
|
@ -4874,7 +4854,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
|
|||
salt myminion docker.save centos:7 /tmp/cent7.tar
|
||||
salt myminion docker.save 0123456789ab cdef01234567 /tmp/saved.tar
|
||||
"""
|
||||
err = "Path '{}' is not absolute".format(path)
|
||||
err = f"Path '{path}' is not absolute"
|
||||
try:
|
||||
if not os.path.isabs(path):
|
||||
raise SaltInvocationError(err)
|
||||
|
@ -4882,7 +4862,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
|
|||
raise SaltInvocationError(err)
|
||||
|
||||
if os.path.exists(path) and not overwrite:
|
||||
raise CommandExecutionError("{} already exists".format(path))
|
||||
raise CommandExecutionError(f"{path} already exists")
|
||||
|
||||
if compression is None:
|
||||
if path.endswith(".tar.gz") or path.endswith(".tgz"):
|
||||
|
@ -4905,7 +4885,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
|
|||
compression = "xz"
|
||||
|
||||
if compression and compression not in ("gzip", "bzip2", "xz"):
|
||||
raise SaltInvocationError("Invalid compression type '{}'".format(compression))
|
||||
raise SaltInvocationError(f"Invalid compression type '{compression}'")
|
||||
|
||||
parent_dir = os.path.dirname(path)
|
||||
if not os.path.isdir(parent_dir):
|
||||
|
@ -4927,7 +4907,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
|
|||
time_started = time.time()
|
||||
result = __salt__["cmd.run_all"](cmd, python_shell=False)
|
||||
if result["retcode"] != 0:
|
||||
err = "Failed to save image(s) to {}".format(path)
|
||||
err = f"Failed to save image(s) to {path}"
|
||||
if result["stderr"]:
|
||||
err += ": {}".format(result["stderr"])
|
||||
raise CommandExecutionError(err)
|
||||
|
@ -4937,9 +4917,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
|
|||
try:
|
||||
out = gzip.open(path, "wb")
|
||||
except OSError as exc:
|
||||
raise CommandExecutionError(
|
||||
"Unable to open {} for writing: {}".format(path, exc)
|
||||
)
|
||||
raise CommandExecutionError(f"Unable to open {path} for writing: {exc}")
|
||||
elif compression == "bzip2":
|
||||
compressor = bz2.BZ2Compressor()
|
||||
elif compression == "xz":
|
||||
|
@ -4975,9 +4953,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
|
|||
os.remove(path)
|
||||
except OSError:
|
||||
pass
|
||||
raise CommandExecutionError(
|
||||
"Error occurred during image save: {}".format(exc)
|
||||
)
|
||||
raise CommandExecutionError(f"Error occurred during image save: {exc}")
|
||||
finally:
|
||||
try:
|
||||
# Clean up temp file
|
||||
|
@ -5097,7 +5073,7 @@ def create_network(
|
|||
ignore_collisions=False,
|
||||
validate_ip_addrs=True,
|
||||
client_timeout=salt.utils.dockermod.CLIENT_TIMEOUT,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
.. versionchanged:: 2018.3.0
|
||||
|
@ -5337,7 +5313,7 @@ def create_network(
|
|||
skip_translate=skip_translate,
|
||||
ignore_collisions=ignore_collisions,
|
||||
validate_ip_addrs=validate_ip_addrs,
|
||||
**__utils__["args.clean_kwargs"](**kwargs)
|
||||
**__utils__["args.clean_kwargs"](**kwargs),
|
||||
)
|
||||
|
||||
if "ipam" not in kwargs:
|
||||
|
@ -5669,7 +5645,7 @@ def pause(name):
|
|||
return {
|
||||
"result": False,
|
||||
"state": {"old": orig_state, "new": orig_state},
|
||||
"comment": "Container '{}' is stopped, cannot pause".format(name),
|
||||
"comment": f"Container '{name}' is stopped, cannot pause",
|
||||
}
|
||||
return _change_state(name, "pause", "paused")
|
||||
|
||||
|
@ -5768,7 +5744,7 @@ def start_(name):
|
|||
return {
|
||||
"result": False,
|
||||
"state": {"old": orig_state, "new": orig_state},
|
||||
"comment": "Container '{}' is paused, cannot start".format(name),
|
||||
"comment": f"Container '{name}' is paused, cannot start",
|
||||
}
|
||||
|
||||
return _change_state(name, "start", "running")
|
||||
|
@ -5873,7 +5849,7 @@ def unpause(name):
|
|||
return {
|
||||
"result": False,
|
||||
"state": {"old": orig_state, "new": orig_state},
|
||||
"comment": "Container '{}' is stopped, cannot unpause".format(name),
|
||||
"comment": f"Container '{name}' is stopped, cannot unpause",
|
||||
}
|
||||
return _change_state(name, "unpause", "running")
|
||||
|
||||
|
@ -5922,7 +5898,7 @@ def wait(name, ignore_already_stopped=False, fail_on_exit_status=False):
|
|||
# Container doesn't exist anymore
|
||||
return {
|
||||
"result": ignore_already_stopped,
|
||||
"comment": "Container '{}' absent".format(name),
|
||||
"comment": f"Container '{name}' absent",
|
||||
}
|
||||
already_stopped = pre == "stopped"
|
||||
response = _client_wrapper("wait", name)
|
||||
|
@ -5946,7 +5922,7 @@ def wait(name, ignore_already_stopped=False, fail_on_exit_status=False):
|
|||
"exit_status": response,
|
||||
}
|
||||
if already_stopped:
|
||||
result["comment"] = "Container '{}' already stopped".format(name)
|
||||
result["comment"] = f"Container '{name}' already stopped"
|
||||
if fail_on_exit_status and result["result"]:
|
||||
result["result"] = result["exit_status"] == 0
|
||||
return result
|
||||
|
@ -5959,7 +5935,7 @@ def prune(
|
|||
build=False,
|
||||
volumes=False,
|
||||
system=None,
|
||||
**filters
|
||||
**filters,
|
||||
):
|
||||
"""
|
||||
.. versionadded:: 2019.2.0
|
||||
|
@ -6645,7 +6621,7 @@ def script_retcode(
|
|||
|
||||
|
||||
def _generate_tmp_path():
|
||||
return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6]))
|
||||
return os.path.join("/tmp", f"salt.docker.{uuid.uuid4().hex[:6]}")
|
||||
|
||||
|
||||
def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""):
|
||||
|
@ -6780,7 +6756,7 @@ def call(name, function, *args, **kwargs):
|
|||
]
|
||||
+ list(args)
|
||||
+ [
|
||||
"{}={}".format(key, value)
|
||||
f"{key}={value}"
|
||||
for (key, value) in kwargs.items()
|
||||
if not key.startswith("__")
|
||||
]
|
||||
|
|
|
@ -41,7 +41,7 @@ def _deprecation_message(function):
|
|||
@wraps(function)
|
||||
def wrapped(*args, **kwargs):
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The 'esxcluster' functionality in Salt has been deprecated and its "
|
||||
"functionality will be removed in version 3008 in favor of the "
|
||||
"saltext.vmware Salt Extension. "
|
||||
|
|
|
@ -41,7 +41,7 @@ def _deprecation_message(function):
|
|||
@wraps(function)
|
||||
def wrapped(*args, **kwargs):
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The 'esxdatacenter' functionality in Salt has been deprecated and its "
|
||||
"functionality will be removed in version 3008 in favor of the "
|
||||
"saltext.vmware Salt Extension. "
|
||||
|
|
|
@ -66,7 +66,7 @@ def _deprecation_message(function):
|
|||
@wraps(function)
|
||||
def wrapped(*args, **kwargs):
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The 'esxi' functionality in Salt has been deprecated and its "
|
||||
"functionality will be removed in version 3008 in favor of the "
|
||||
"saltext.vmware Salt Extension. "
|
||||
|
|
|
@ -43,7 +43,7 @@ def _deprecation_message(function):
|
|||
@wraps(function)
|
||||
def wrapped(*args, **kwargs):
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The 'esxvm' functionality in Salt has been deprecated and its "
|
||||
"functionality will be removed in version 3008 in favor of the "
|
||||
"saltext.vmware Salt Extension. "
|
||||
|
|
|
@ -286,19 +286,13 @@ def adduser(name, username, root=None):
|
|||
Verifies if a valid username 'bar' as a member of an existing group 'foo',
|
||||
if not then adds it.
|
||||
"""
|
||||
on_redhat_5 = (
|
||||
__grains__.get("os_family") == "RedHat"
|
||||
and __grains__.get("osmajorrelease") == "5"
|
||||
)
|
||||
on_suse_11 = (
|
||||
__grains__.get("os_family") == "Suse"
|
||||
and __grains__.get("osmajorrelease") == "11"
|
||||
)
|
||||
|
||||
if __grains__["kernel"] == "Linux":
|
||||
if on_redhat_5:
|
||||
cmd = [_which("gpasswd"), "-a", username, name]
|
||||
elif on_suse_11:
|
||||
if on_suse_11:
|
||||
cmd = [_which("usermod"), "-A", name, username]
|
||||
else:
|
||||
cmd = [_which("gpasswd"), "--add", username, name]
|
||||
|
@ -336,10 +330,6 @@ def deluser(name, username, root=None):
|
|||
Removes a member user 'bar' from a group 'foo'. If group is not present
|
||||
then returns True.
|
||||
"""
|
||||
on_redhat_5 = (
|
||||
__grains__.get("os_family") == "RedHat"
|
||||
and __grains__.get("osmajorrelease") == "5"
|
||||
)
|
||||
on_suse_11 = (
|
||||
__grains__.get("os_family") == "Suse"
|
||||
and __grains__.get("osmajorrelease") == "11"
|
||||
|
@ -349,9 +339,7 @@ def deluser(name, username, root=None):
|
|||
try:
|
||||
if username in grp_info["members"]:
|
||||
if __grains__["kernel"] == "Linux":
|
||||
if on_redhat_5:
|
||||
cmd = [_which("gpasswd"), "-d", username, name]
|
||||
elif on_suse_11:
|
||||
if on_suse_11:
|
||||
cmd = [_which("usermod"), "-R", name, username]
|
||||
else:
|
||||
cmd = [_which("gpasswd"), "--del", username, name]
|
||||
|
@ -400,19 +388,13 @@ def members(name, members_list, root=None):
|
|||
Replaces a membership list for a local group 'foo'.
|
||||
foo:x:1234:user1,user2,user3,...
|
||||
"""
|
||||
on_redhat_5 = (
|
||||
__grains__.get("os_family") == "RedHat"
|
||||
and __grains__.get("osmajorrelease") == "5"
|
||||
)
|
||||
on_suse_11 = (
|
||||
__grains__.get("os_family") == "Suse"
|
||||
and __grains__.get("osmajorrelease") == "11"
|
||||
)
|
||||
|
||||
if __grains__["kernel"] == "Linux":
|
||||
if on_redhat_5:
|
||||
cmd = [_which("gpasswd"), "-M", members_list, name]
|
||||
elif on_suse_11:
|
||||
if on_suse_11:
|
||||
for old_member in __salt__["group.info"](name).get("members"):
|
||||
__salt__["cmd.run"](
|
||||
"{} -R {} {}".format(_which("groupmod"), old_member, name),
|
||||
|
|
|
@ -12,9 +12,9 @@ import datetime
|
|||
import difflib
|
||||
import logging
|
||||
import os
|
||||
import pipes
|
||||
import random
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import string
|
||||
import tempfile
|
||||
|
@ -1834,7 +1834,7 @@ def _after_ignition_network_profile(cmd, ret, name, network_profile, path, nic_o
|
|||
# destroy the container if it was partially created
|
||||
cmd = "lxc-destroy"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" -n {name}"
|
||||
__salt__["cmd.retcode"](cmd, python_shell=False)
|
||||
raise CommandExecutionError(
|
||||
|
@ -1997,7 +1997,7 @@ def create(
|
|||
)
|
||||
options["imgtar"] = img_tar
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
if config:
|
||||
|
@ -2136,7 +2136,7 @@ def clone(name, orig, profile=None, network_profile=None, nic_opts=None, **kwarg
|
|||
cmd = "lxc-clone"
|
||||
cmd += f" {snapshot} -o {orig} -n {name}"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
if backing:
|
||||
|
@ -2184,7 +2184,7 @@ def ls_(active=None, cache=True, path=None):
|
|||
ret = []
|
||||
cmd = "lxc-ls"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
if active:
|
||||
cmd += " --active"
|
||||
output = __salt__["cmd.run_stdout"](cmd, python_shell=False)
|
||||
|
@ -2240,7 +2240,7 @@ def list_(extra=False, limit=None, path=None):
|
|||
for container in ctnrs:
|
||||
cmd = "lxc-info"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" -n {container}"
|
||||
c_info = __salt__["cmd.run"](cmd, python_shell=False, output_loglevel="debug")
|
||||
c_state = None
|
||||
|
@ -2299,12 +2299,12 @@ def _change_state(
|
|||
# Kill the container first
|
||||
scmd = "lxc-stop"
|
||||
if path:
|
||||
scmd += f" -P {pipes.quote(path)}"
|
||||
scmd += f" -P {shlex.quote(path)}"
|
||||
scmd += f" -k -n {name}"
|
||||
__salt__["cmd.run"](scmd, python_shell=False)
|
||||
|
||||
if path and " -P " not in cmd:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" -n {name}"
|
||||
|
||||
# certain lxc commands need to be taken with care (lxc-start)
|
||||
|
@ -2335,7 +2335,7 @@ def _change_state(
|
|||
# some commands do not wait, so we will
|
||||
rcmd = "lxc-wait"
|
||||
if path:
|
||||
rcmd += f" -P {pipes.quote(path)}"
|
||||
rcmd += f" -P {shlex.quote(path)}"
|
||||
rcmd += f" -n {name} -s {expected.upper()}"
|
||||
__salt__["cmd.run"](rcmd, python_shell=False, timeout=30)
|
||||
_clear_context()
|
||||
|
@ -2457,7 +2457,7 @@ def start(name, **kwargs):
|
|||
lxc_config = os.path.join(cpath, name, "config")
|
||||
# we try to start, even without config, if global opts are there
|
||||
if os.path.exists(lxc_config):
|
||||
cmd += f" -f {pipes.quote(lxc_config)}"
|
||||
cmd += f" -f {shlex.quote(lxc_config)}"
|
||||
cmd += " -d"
|
||||
_ensure_exists(name, path=path)
|
||||
if state(name, path=path) == "frozen":
|
||||
|
@ -2560,7 +2560,7 @@ def freeze(name, **kwargs):
|
|||
start(name, path=path)
|
||||
cmd = "lxc-freeze"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
ret = _change_state(cmd, name, "frozen", use_vt=use_vt, path=path)
|
||||
if orig_state == "stopped" and start_:
|
||||
ret["state"]["old"] = orig_state
|
||||
|
@ -2595,7 +2595,7 @@ def unfreeze(name, path=None, use_vt=None):
|
|||
raise CommandExecutionError(f"Container '{name}' is stopped")
|
||||
cmd = "lxc-unfreeze"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
return _change_state(cmd, name, "running", path=path, use_vt=use_vt)
|
||||
|
||||
|
||||
|
@ -2689,7 +2689,7 @@ def state(name, path=None):
|
|||
else:
|
||||
cmd = "lxc-info"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" -n {name}"
|
||||
ret = __salt__["cmd.run_all"](cmd, python_shell=False)
|
||||
if ret["retcode"] != 0:
|
||||
|
@ -2727,7 +2727,7 @@ def get_parameter(name, parameter, path=None):
|
|||
_ensure_exists(name, path=path)
|
||||
cmd = "lxc-cgroup"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" -n {name} {parameter}"
|
||||
ret = __salt__["cmd.run_all"](cmd, python_shell=False)
|
||||
if ret["retcode"] != 0:
|
||||
|
@ -2756,7 +2756,7 @@ def set_parameter(name, parameter, value, path=None):
|
|||
|
||||
cmd = "lxc-cgroup"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" -n {name} {parameter} {value}"
|
||||
ret = __salt__["cmd.run_all"](cmd, python_shell=False)
|
||||
if ret["retcode"] != 0:
|
||||
|
@ -3648,7 +3648,7 @@ def attachable(name, path=None):
|
|||
log.debug("Checking if LXC container %s is attachable", name)
|
||||
cmd = "lxc-attach"
|
||||
if path:
|
||||
cmd += f" -P {pipes.quote(path)}"
|
||||
cmd += f" -P {shlex.quote(path)}"
|
||||
cmd += f" --clear-env -n {name} -- /usr/bin/env"
|
||||
result = (
|
||||
__salt__["cmd.retcode"](
|
||||
|
|
|
@ -11,20 +11,6 @@ import shlex
|
|||
|
||||
import salt.utils.platform
|
||||
|
||||
try:
|
||||
import pipes
|
||||
|
||||
HAS_DEPS = True
|
||||
except ImportError:
|
||||
HAS_DEPS = False
|
||||
|
||||
if hasattr(shlex, "quote"):
|
||||
_quote = shlex.quote
|
||||
elif HAS_DEPS and hasattr(pipes, "quote"):
|
||||
_quote = pipes.quote
|
||||
else:
|
||||
_quote = None
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = "keychain"
|
||||
|
@ -34,7 +20,7 @@ def __virtual__():
|
|||
"""
|
||||
Only work on Mac OS
|
||||
"""
|
||||
if salt.utils.platform.is_darwin() and _quote is not None:
|
||||
if salt.utils.platform.is_darwin():
|
||||
return __virtualname__
|
||||
return (False, "Only available on Mac OS systems with pipes")
|
||||
|
||||
|
@ -82,7 +68,7 @@ def install(
|
|||
if keychain_password is not None:
|
||||
unlock_keychain(keychain, keychain_password)
|
||||
|
||||
cmd = "security import {} -P {} -k {}".format(cert, password, keychain)
|
||||
cmd = f"security import {cert} -P {password} -k {keychain}"
|
||||
if allow_any:
|
||||
cmd += " -A"
|
||||
return __salt__["cmd.run"](cmd)
|
||||
|
@ -117,7 +103,7 @@ def uninstall(
|
|||
if keychain_password is not None:
|
||||
unlock_keychain(keychain, keychain_password)
|
||||
|
||||
cmd = 'security delete-certificate -c "{}" {}'.format(cert_name, keychain)
|
||||
cmd = f'security delete-certificate -c "{cert_name}" {keychain}'
|
||||
return __salt__["cmd.run"](cmd)
|
||||
|
||||
|
||||
|
@ -137,7 +123,7 @@ def list_certs(keychain="/Library/Keychains/System.keychain"):
|
|||
"""
|
||||
cmd = (
|
||||
'security find-certificate -a {} | grep -o "alis".*\\" | '
|
||||
"grep -o '\\\"[-A-Za-z0-9.:() ]*\\\"'".format(_quote(keychain))
|
||||
"grep -o '\\\"[-A-Za-z0-9.:() ]*\\\"'".format(shlex.quote(keychain))
|
||||
)
|
||||
out = __salt__["cmd.run"](cmd, python_shell=True)
|
||||
return out.replace('"', "").split("\n")
|
||||
|
@ -165,7 +151,7 @@ def get_friendly_name(cert, password):
|
|||
"""
|
||||
cmd = (
|
||||
"openssl pkcs12 -in {} -passin pass:{} -info -nodes -nokeys 2> /dev/null | "
|
||||
"grep friendlyName:".format(_quote(cert), _quote(password))
|
||||
"grep friendlyName:".format(shlex.quote(cert), shlex.quote(password))
|
||||
)
|
||||
out = __salt__["cmd.run"](cmd, python_shell=True)
|
||||
return out.replace("friendlyName: ", "").strip()
|
||||
|
@ -187,7 +173,7 @@ def get_default_keychain(user=None, domain="user"):
|
|||
|
||||
salt '*' keychain.get_default_keychain
|
||||
"""
|
||||
cmd = "security default-keychain -d {}".format(domain)
|
||||
cmd = f"security default-keychain -d {domain}"
|
||||
return __salt__["cmd.run"](cmd, runas=user)
|
||||
|
||||
|
||||
|
@ -210,7 +196,7 @@ def set_default_keychain(keychain, domain="user", user=None):
|
|||
|
||||
salt '*' keychain.set_keychain /Users/fred/Library/Keychains/login.keychain
|
||||
"""
|
||||
cmd = "security default-keychain -d {} -s {}".format(domain, keychain)
|
||||
cmd = f"security default-keychain -d {domain} -s {keychain}"
|
||||
return __salt__["cmd.run"](cmd, runas=user)
|
||||
|
||||
|
||||
|
@ -233,7 +219,7 @@ def unlock_keychain(keychain, password):
|
|||
|
||||
salt '*' keychain.unlock_keychain /tmp/test.p12 test123
|
||||
"""
|
||||
cmd = "security unlock-keychain -p {} {}".format(password, keychain)
|
||||
cmd = f"security unlock-keychain -p {password} {keychain}"
|
||||
__salt__["cmd.run"](cmd)
|
||||
|
||||
|
||||
|
@ -261,7 +247,7 @@ def get_hash(name, password=None):
|
|||
name, password
|
||||
)
|
||||
else:
|
||||
cmd = 'security find-certificate -c "{}" -m -p'.format(name)
|
||||
cmd = f'security find-certificate -c "{name}" -m -p'
|
||||
|
||||
out = __salt__["cmd.run"](cmd)
|
||||
matches = re.search(
|
||||
|
|
|
@@ -9,31 +9,16 @@ import shlex
 
 import salt.utils.platform
 
-try:
-    import pipes
-
-    HAS_DEPS = True
-except ImportError:
-    HAS_DEPS = False
-
-
 log = logging.getLogger(__name__)
 
 __virtualname__ = "macpackage"
 
 
-if hasattr(shlex, "quote"):
-    _quote = shlex.quote
-elif HAS_DEPS and hasattr(pipes, "quote"):
-    _quote = pipes.quote
-else:
-    _quote = None
-
-
 def __virtual__():
     """
     Only work on Mac OS
     """
-    if salt.utils.platform.is_darwin() and _quote is not None:
+    if salt.utils.platform.is_darwin():
         return __virtualname__
     return (False, "Only available on Mac OS systems with pipes")
@@ -60,11 +45,11 @@ def install(pkg, target="LocalSystem", store=False, allow_untrusted=False):
     """
     if "*." not in pkg:
         # If we use wildcards, we cannot use quotes
-        pkg = _quote(pkg)
+        pkg = shlex.quote(pkg)
 
-    target = _quote(target)
+    target = shlex.quote(target)
 
-    cmd = "installer -pkg {} -target {}".format(pkg, target)
+    cmd = f"installer -pkg {pkg} -target {target}"
     if store:
         cmd += " -store"
     if allow_untrusted:
@@ -109,7 +94,7 @@ def install_app(app, target="/Applications/"):
     if not app[-1] == "/":
         app += "/"
 
-    cmd = 'rsync -a --delete "{}" "{}"'.format(app, target)
+    cmd = f'rsync -a --delete "{app}" "{target}"'
     return __salt__["cmd.run"](cmd)
 
 
@@ -154,7 +139,7 @@ def mount(dmg):
 
     temp_dir = __salt__["temp.dir"](prefix="dmg-")
 
-    cmd = 'hdiutil attach -readonly -nobrowse -mountpoint {} "{}"'.format(temp_dir, dmg)
+    cmd = f'hdiutil attach -readonly -nobrowse -mountpoint {temp_dir} "{dmg}"'
 
     return __salt__["cmd.run"](cmd), temp_dir
 
@@ -176,7 +161,7 @@ def unmount(mountpoint):
         salt '*' macpackage.unmount /dev/disk2
     """
 
-    cmd = 'hdiutil detach "{}"'.format(mountpoint)
+    cmd = f'hdiutil detach "{mountpoint}"'
 
     return __salt__["cmd.run"](cmd)
 
@@ -216,7 +201,7 @@ def get_pkg_id(pkg):
 
         salt '*' macpackage.get_pkg_id /tmp/test.pkg
     """
-    pkg = _quote(pkg)
+    pkg = shlex.quote(pkg)
     package_ids = []
 
     # Create temp directory
@@ -224,7 +209,7 @@ def get_pkg_id(pkg):
 
     try:
         # List all of the PackageInfo files
-        cmd = "xar -t -f {} | grep PackageInfo".format(pkg)
+        cmd = f"xar -t -f {pkg} | grep PackageInfo"
         out = __salt__["cmd.run"](cmd, python_shell=True, output_loglevel="quiet")
         files = out.split("\n")
 
@@ -264,12 +249,12 @@ def get_mpkg_ids(mpkg):
 
         salt '*' macpackage.get_mpkg_ids /dev/disk2
     """
-    mpkg = _quote(mpkg)
+    mpkg = shlex.quote(mpkg)
     package_infos = []
     base_path = os.path.dirname(mpkg)
 
     # List all of the .pkg files
-    cmd = "find {} -name *.pkg".format(base_path)
+    cmd = f"find {base_path} -name *.pkg"
     out = __salt__["cmd.run"](cmd, python_shell=True)
 
     pkg_files = out.split("\n")
@@ -281,7 +266,7 @@ def get_mpkg_ids(mpkg):
 
 def _get_pkg_id_from_pkginfo(pkginfo):
     # Find our identifiers
-    pkginfo = _quote(pkginfo)
+    pkginfo = shlex.quote(pkginfo)
     cmd = "cat {} | grep -Eo 'identifier=\"[a-zA-Z.0-9\\-]*\"' | cut -c 13- | tr -d '\"'".format(
         pkginfo
     )
@@ -294,8 +279,8 @@ def _get_pkg_id_from_pkginfo(pkginfo):
 
 
 def _get_pkg_id_dir(path):
-    path = _quote(os.path.join(path, "Contents/Info.plist"))
-    cmd = '/usr/libexec/PlistBuddy -c "print :CFBundleIdentifier" {}'.format(path)
+    path = shlex.quote(os.path.join(path, "Contents/Info.plist"))
+    cmd = f'/usr/libexec/PlistBuddy -c "print :CFBundleIdentifier" {path}'
 
     # We can only use wildcards in python_shell which is
     # sent by the macpackage state
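The macpackage hunks drop the pipes fallback and call shlex.quote directly; shlex.quote has been available since Python 3.3, while the undocumented pipes module was deprecated and is gone in Python 3.13. A small sketch of the pattern, with an illustrative path:

    import shlex

    pkg = "/tmp/My App.pkg"  # illustrative path containing a space
    cmd = f"installer -pkg {shlex.quote(pkg)} -target LocalSystem"
    # installer -pkg '/tmp/My App.pkg' -target LocalSystem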
@@ -322,7 +322,7 @@ def call(method, *args, **kwargs):
         napalm_device,  # pylint: disable=undefined-variable
         method,
         *args,
-        **clean_kwargs
+        **clean_kwargs,
     )
 
 
@@ -561,7 +561,7 @@ def netmiko_fun(fun, *args, **kwargs):
         salt '*' napalm.netmiko_fun send_command 'show version'
     """
     if "netmiko." not in fun:
-        fun = "netmiko.{fun}".format(fun=fun)
+        fun = f"netmiko.{fun}"
     netmiko_kwargs = netmiko_args()
     kwargs.update(netmiko_kwargs)
     return __salt__[fun](*args, **kwargs)
@@ -764,7 +764,7 @@ def netmiko_conn(**kwargs):
         conn.disconnect()
     """
     salt.utils.versions.warn_until(
-        "Chlorine",
+        3007,
         "This 'napalm_mod.netmiko_conn' function as been deprecated and "
         "will be removed in the {version} release, as such, it has been "
         "made an internal function since it is not suitable for CLI usage",
@@ -1038,14 +1038,14 @@ def junos_call(fun, *args, **kwargs):
     if not prep["result"]:
         return prep
     if "junos." not in fun:
-        mod_fun = "junos.{}".format(fun)
+        mod_fun = f"junos.{fun}"
     else:
         mod_fun = fun
     if mod_fun not in __salt__:
         return {
             "out": None,
             "result": False,
-            "comment": "{} is not a valid function".format(fun),
+            "comment": f"{fun} is not a valid function",
         }
     return __salt__[mod_fun](*args, **kwargs)
 
@@ -1161,7 +1161,7 @@ def pyeapi_conn(**kwargs):
         res2 = conn.get_config(as_string=True)
     """
     salt.utils.versions.warn_until(
-        "Chlorine",
+        3007,
         "This 'napalm_mod.pyeapi_conn' function as been deprecated and "
         "will be removed in the {version} release, as such, it has been "
         "made an internal function since it is not suitable for CLI usage",
@@ -1177,7 +1177,7 @@ def pyeapi_config(
     context=None,
     defaults=None,
     saltenv="base",
-    **kwargs
+    **kwargs,
 ):
     """
     .. versionadded:: 2019.2.0
@@ -1235,7 +1235,7 @@ def pyeapi_config(
         context=context,
         defaults=defaults,
         saltenv=saltenv,
-        **pyeapi_kwargs
+        **pyeapi_kwargs,
     )
 
 
@@ -1271,7 +1271,7 @@ def nxos_api_config(
     context=None,
     defaults=None,
     saltenv="base",
-    **kwargs
+    **kwargs,
 ):
     """
     .. versionadded:: 2019.2.0
@@ -1327,7 +1327,7 @@ def nxos_api_config(
         context=context,
         defaults=defaults,
         saltenv=saltenv,
-        **nxos_api_kwargs
+        **nxos_api_kwargs,
     )
 
 
@@ -1915,7 +1915,7 @@ def scp_get(
         local_path=local_path,
         recursive=recursive,
         preserve_times=preserve_times,
-        **kwargs
+        **kwargs,
     )
 
 
@@ -1926,7 +1926,7 @@ def scp_put(
     recursive=False,
     preserve_times=False,
     saltenv="base",
-    **kwargs
+    **kwargs,
 ):
     """
     .. versionadded:: 2019.2.0
@@ -2021,5 +2021,5 @@ def scp_put(
         recursive=recursive,
         preserve_times=preserve_times,
         saltenv=saltenv,
-        **kwargs
+        **kwargs,
     )
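Most of the napalm_mod hunks simply add a trailing comma after the final **kwargs in signatures and calls. That relies on grammar Python has accepted, if memory serves, since 3.6, and it keeps future diffs one line long when another argument is appended. A sketch with an illustrative signature:

    def pyeapi_config_sketch(
        commands=None,
        saltenv="base",
        **kwargs,  # trailing comma after **kwargs is legal in Python 3.6+
    ):
        return commands, saltenv, kwargs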
@@ -219,7 +219,7 @@ def cmd(command, *args, **kwargs):
         salt '*' nxos.cmd show_run
         salt '*' nxos.cmd check_password username=admin password='$5$lkjsdfoi$blahblahblah' encrypted=True
     """
-    warn_until("Argon", "'nxos.cmd COMMAND' is deprecated in favor of 'nxos.COMMAND'")
+    warn_until(3008, "'nxos.cmd COMMAND' is deprecated in favor of 'nxos.COMMAND'")
 
     for k in list(kwargs):
         if k.startswith("__pub_"):
@@ -258,7 +258,7 @@ def get_roles(username, **kwargs):
     user = get_user(username)
     if not user:
         return []
-    command = "show user-account {}".format(username)
+    command = f"show user-account {username}"
     info = sendline(command, **kwargs)
     if isinstance(info, list):
         info = info[0]
@@ -278,7 +278,7 @@ def get_user(username, **kwargs):
 
         salt '*' nxos.get_user username=admin
     """
-    command = 'show run | include "^username {} password 5 "'.format(username)
+    command = f'show run | include "^username {username} password 5 "'
     info = sendline(command, **kwargs)
     if isinstance(info, list):
         info = info[0]
@@ -383,7 +383,7 @@ def show(commands, raw_text=True, **kwargs):
         salt 'regular-minion' nxos.show 'show interfaces' host=sw01.example.com username=test password=test
     """
     warn_until(
-        "Argon",
+        3008,
         "'nxos.show commands' is deprecated in favor of 'nxos.sendline commands'",
     )
 
@@ -450,7 +450,7 @@ def system_info(**kwargs):
 
         salt '*' nxos.system_info
     """
-    warn_until("Argon", "'nxos.system_info' is deprecated in favor of 'nxos.grains'")
+    warn_until(3008, "'nxos.system_info' is deprecated in favor of 'nxos.grains'")
     return salt.utils.nxos.system_info(show_ver(**kwargs))["nxos"]
 
 
@@ -477,7 +477,7 @@ def add_config(lines, **kwargs):
     For more than one config added per command, lines should be a list.
     """
     warn_until(
-        "Argon",
+        3008,
         "'nxos.add_config lines' is deprecated in favor of 'nxos.config commands'",
     )
 
@@ -492,7 +492,7 @@ def config(
     context=None,
     defaults=None,
     saltenv="base",
-    **kwargs
+    **kwargs,
 ):
     """
     Configures the Nexus switch with the specified commands.
@@ -562,7 +562,7 @@ def config(
     if config_file:
         file_str = __salt__["cp.get_file_str"](config_file, saltenv=saltenv)
         if file_str is False:
-            raise CommandExecutionError("Source file {} not found".format(config_file))
+            raise CommandExecutionError(f"Source file {config_file} not found")
     elif commands:
         if isinstance(commands, str):
             commands = [commands]
@@ -664,7 +664,7 @@ def remove_user(username, **kwargs):
 
         salt '*' nxos.remove_user username=daniel
     """
-    user_line = "no username {}".format(username)
+    user_line = f"no username {username}"
     kwargs = clean_kwargs(**kwargs)
     return config(user_line, **kwargs)
 
@@ -681,7 +681,7 @@ def replace(old_value, new_value, full_match=False, **kwargs):
         salt '*' nxos.replace 'TESTSTRINGHERE' 'NEWTESTSTRINGHERE'
     """
     if full_match is False:
-        matcher = re.compile("^.*{}.*$".format(re.escape(old_value)), re.MULTILINE)
+        matcher = re.compile(f"^.*{re.escape(old_value)}.*$", re.MULTILINE)
         repl = re.compile(re.escape(old_value))
     else:
         matcher = re.compile(old_value, re.MULTILINE)
@@ -719,7 +719,7 @@ def set_password(
     role=None,
     crypt_salt=None,
     algorithm="sha256",
-    **kwargs
+    **kwargs,
 ):
     """
     Set users password on switch.
@@ -767,9 +767,9 @@ def set_password(
         )
     else:
         hashed_pass = password
-    password_line = "username {} password 5 {}".format(username, hashed_pass)
+    password_line = f"username {username} password 5 {hashed_pass}"
     if role is not None:
-        password_line += " role {}".format(role)
+        password_line += f" role {role}"
     kwargs = clean_kwargs(**kwargs)
     return config(password_line, **kwargs)
 
@@ -793,7 +793,7 @@ def set_role(username, role, **kwargs):
 
         salt '*' nxos.set_role username=daniel role=vdc-admin.
     """
-    role_line = "username {} role {}".format(username, role)
+    role_line = f"username {username} role {role}"
    kwargs = clean_kwargs(**kwargs)
    return config(role_line, **kwargs)
 
@@ -817,7 +817,7 @@ def unset_role(username, role, **kwargs):
 
         salt '*' nxos.unset_role username=daniel role=vdc-admin
     """
-    role_line = "no username {} role {}".format(username, role)
+    role_line = f"no username {username} role {role}"
     kwargs = clean_kwargs(**kwargs)
     return config(role_line, **kwargs)
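Throughout the nxos hunks, warn_until swaps release codenames ("Argon") for numeric versions (3008); both spellings name the same release. Assuming the usual salt.utils.versions semantics, the helper emits a DeprecationWarning while the running Salt release is older than the target, and complains loudly once the target is reached so the shim gets removed. A hedged sketch of such a shim:

    from salt.utils.versions import warn_until

    def system_info_sketch(**kwargs):  # illustrative wrapper
        # Warns until the 3008 release; the message mirrors the diff above.
        warn_until(3008, "'nxos.system_info' is deprecated in favor of 'nxos.grains'")
        ...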
@@ -13,28 +13,11 @@ import shlex
 import salt.exceptions
 import salt.utils.decorators.path
 
-try:
-    import pipes
-
-    HAS_DEPS = True
-except ImportError:
-    HAS_DEPS = False
-
-if hasattr(shlex, "quote"):
-    _quote = shlex.quote
-elif HAS_DEPS and hasattr(pipes, "quote"):
-    _quote = pipes.quote
-else:
-    _quote = None
-
-
 # Don't shadow built-in's.
 __func_alias__ = {"set_": "set"}
 
 
 def __virtual__():
-    if _quote is None and not HAS_DEPS:
-        return (False, "Missing dependencies")
     return True
 
 
@@ -69,10 +52,10 @@ def set_(filename, section, parameter, value):
         salt-call openstack_config.set /etc/keystone/keystone.conf sql connection foo
     """
 
-    filename = _quote(filename)
-    section = _quote(section)
-    parameter = _quote(parameter)
-    value = _quote(str(value))
+    filename = shlex.quote(filename)
+    section = shlex.quote(section)
+    parameter = shlex.quote(parameter)
+    value = shlex.quote(str(value))
 
     result = __salt__["cmd.run_all"](
         "openstack-config --set {} {} {} {}".format(
@@ -109,12 +92,12 @@ def get(filename, section, parameter):
 
     """
 
-    filename = _quote(filename)
-    section = _quote(section)
-    parameter = _quote(parameter)
+    filename = shlex.quote(filename)
+    section = shlex.quote(section)
+    parameter = shlex.quote(parameter)
 
     result = __salt__["cmd.run_all"](
-        "openstack-config --get {} {} {}".format(filename, section, parameter),
+        f"openstack-config --get {filename} {section} {parameter}",
         python_shell=False,
     )
 
@@ -145,12 +128,12 @@ def delete(filename, section, parameter):
         salt-call openstack_config.delete /etc/keystone/keystone.conf sql connection
     """
 
-    filename = _quote(filename)
-    section = _quote(section)
-    parameter = _quote(parameter)
+    filename = shlex.quote(filename)
+    section = shlex.quote(section)
+    parameter = shlex.quote(parameter)
 
     result = __salt__["cmd.run_all"](
-        "openstack-config --del {} {} {}".format(filename, section, parameter),
+        f"openstack-config --del {filename} {section} {parameter}",
         python_shell=False,
     )
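Note that the openstack_config functions quote every field and still pass python_shell=False, so the quoting is defense in depth rather than the only barrier against shell injection. A self-contained sketch of the same pattern (function name is illustrative):

    import shlex

    def build_get_cmd(filename, section, parameter):
        # Quote each field; the caller additionally runs the string with
        # python_shell=False so no shell ever re-parses it.
        quoted = (shlex.quote(str(x)) for x in (filename, section, parameter))
        return "openstack-config --get {} {} {}".format(*quoted)

    print(build_get_cmd("/etc/keystone/keystone.conf", "sql", "connection"))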
@@ -46,8 +46,8 @@ import hmac
 import io
 import logging
 import os
-import pipes
 import re
+import shlex
 import tempfile
 
 import salt.utils.files
@@ -136,7 +136,7 @@ def __virtual__():
     for util in utils:
-        if not salt.utils.path.which(util):
-            return (False, "{} was not found".format(util))
+        if not _find_pg_binary(util):
+            return (False, f"{util} was not found")
     return True
 
 
@@ -241,14 +241,14 @@ def _run_initdb(
         raise CommandExecutionError("initdb executable not found.")
     cmd = [
         _INITDB_BIN,
-        "--pgdata={}".format(name),
-        "--username={}".format(user),
-        "--auth={}".format(auth),
-        "--encoding={}".format(encoding),
+        f"--pgdata={name}",
+        f"--username={user}",
+        f"--auth={auth}",
+        f"--encoding={encoding}",
     ]
 
     if locale is not None:
-        cmd.append("--locale={}".format(locale))
+        cmd.append(f"--locale={locale}")
 
     # intentionally use short option, as the long option name has been
     # renamed from "xlogdir" to "waldir" in PostgreSQL 10
@@ -262,9 +262,9 @@ def _run_initdb(
     if password is not None:
         pgpassfile = salt.utils.files.mkstemp(text=True)
         with salt.utils.files.fopen(pgpassfile, "w") as fp_:
-            fp_.write(salt.utils.stringutils.to_str("{}".format(password)))
+            fp_.write(salt.utils.stringutils.to_str(f"{password}"))
         __salt__["file.chown"](pgpassfile, runas, "")
-        cmd.extend(["--pwfile={}".format(pgpassfile)])
+        cmd.extend([f"--pwfile={pgpassfile}"])
 
     kwargs = dict(
         runas=runas,
@@ -273,7 +273,7 @@ def _run_initdb(
             "postgres.timeout", default=_DEFAULT_COMMAND_TIMEOUT_SECS
         ),
     )
-    cmdstr = " ".join([pipes.quote(c) for c in cmd])
+    cmdstr = " ".join([shlex.quote(c) for c in cmd])
     ret = __salt__["cmd.run_all"](cmdstr, python_shell=False, **kwargs)
 
     if ret.get("retcode", 0) != 0:
@@ -582,9 +582,7 @@ def _quote_ddl_value(value, quote="'"):
     if value is None:
         return None
     if quote in value:  # detect trivial sqli
-        raise SaltInvocationError(
-            "Unsupported character {} in value: {}".format(quote, value)
-        )
+        raise SaltInvocationError(f"Unsupported character {quote} in value: {value}")
     return "{quote}{value}{quote}".format(quote=quote, value=value)
 
 
@@ -617,7 +615,7 @@ def db_create(
     """
 
     # Base query to create a database
-    query = 'CREATE DATABASE "{}"'.format(name)
+    query = f'CREATE DATABASE "{name}"'
 
     # "With"-options to create a database
     with_args = salt.utils.odict.OrderedDict(
@@ -685,11 +683,9 @@ def db_alter(
     else:
         queries = []
         if owner:
-            queries.append('ALTER DATABASE "{}" OWNER TO "{}"'.format(name, owner))
+            queries.append(f'ALTER DATABASE "{name}" OWNER TO "{owner}"')
         if tablespace:
-            queries.append(
-                'ALTER DATABASE "{}" SET TABLESPACE "{}"'.format(name, tablespace)
-            )
+            queries.append(f'ALTER DATABASE "{name}" SET TABLESPACE "{tablespace}"')
     for query in queries:
         ret = _psql_prepare_and_run(
             ["-c", query],
@@ -726,10 +722,10 @@ def db_remove(
         salt '*' postgres.db_remove 'dbname'
     """
     for query in [
-        'REVOKE CONNECT ON DATABASE "{db}" FROM public;'.format(db=name),
+        f'REVOKE CONNECT ON DATABASE "{name}" FROM public;',
         "SELECT pid, pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname ="
         " '{db}' AND pid <> pg_backend_pid();".format(db=name),
-        'DROP DATABASE "{db}";'.format(db=name),
+        f'DROP DATABASE "{name}";',
     ]:
         ret = _psql_prepare_and_run(
             ["-c", query],
@@ -741,7 +737,7 @@ def db_remove(
             password=password,
         )
         if ret["retcode"] != 0:
-            raise Exception("Failed: ret={}".format(ret))
+            raise Exception(f"Failed: ret={ret}")
     return True
 
 
@@ -846,10 +842,10 @@ def tablespace_create(
     owner_query = ""
     options_query = ""
    if owner:
-        owner_query = 'OWNER "{}"'.format(owner)
+        owner_query = f'OWNER "{owner}"'
         # should come out looking like: 'OWNER postgres'
     if options:
-        optionstext = ["{} = {}".format(k, v) for k, v in options.items()]
+        optionstext = [f"{k} = {v}" for k, v in options.items()]
         options_query = "WITH ( {} )".format(", ".join(optionstext))
         # should come out looking like: 'WITH ( opt1 = 1.0, opt2 = 4.0 )'
     query = "CREATE TABLESPACE \"{}\" {} LOCATION '{}' {}".format(
@@ -902,9 +898,9 @@ def tablespace_alter(
     queries = []
 
     if new_name:
-        queries.append('ALTER TABLESPACE "{}" RENAME TO "{}"'.format(name, new_name))
+        queries.append(f'ALTER TABLESPACE "{name}" RENAME TO "{new_name}"')
     if new_owner:
-        queries.append('ALTER TABLESPACE "{}" OWNER TO "{}"'.format(name, new_owner))
+        queries.append(f'ALTER TABLESPACE "{name}" OWNER TO "{new_owner}"')
     if set_option:
         queries.append(
             'ALTER TABLESPACE "{}" SET ({} = {})'.format(
@@ -912,7 +908,7 @@ def tablespace_alter(
             )
         )
     if reset_option:
-        queries.append('ALTER TABLESPACE "{}" RESET ({})'.format(name, reset_option))
+        queries.append(f'ALTER TABLESPACE "{name}" RESET ({reset_option})')
 
     for query in queries:
         ret = _psql_prepare_and_run(
@@ -950,7 +946,7 @@ def tablespace_remove(
 
     .. versionadded:: 2015.8.0
     """
-    query = 'DROP TABLESPACE "{}"'.format(name)
+    query = f'DROP TABLESPACE "{name}"'
     ret = _psql_prepare_and_run(
         ["-c", query],
         user=user,
@@ -1158,11 +1154,11 @@ def _add_role_flag(string, test, flag, cond=None, prefix="NO", addtxt="", skip=False):
         cond = test
     if test is not None:
         if cond:
-            string = "{} {}".format(string, flag)
+            string = f"{string} {flag}"
         else:
-            string = "{0} {2}{1}".format(string, flag, prefix)
+            string = f"{string} {prefix}{flag}"
     if addtxt:
-        string = "{} {}".format(string, addtxt)
+        string = f"{string} {addtxt}"
     return string
 
 
@@ -1224,7 +1220,7 @@ def _verify_password(role, password, verifier, method):
 def _md5_password(role, password):
     return "md5{}".format(
         hashlib.md5(  # nosec
-            salt.utils.stringutils.to_bytes("{}{}".format(password, role))
+            salt.utils.stringutils.to_bytes(f"{password}{role}")
         ).hexdigest()
     )
 
@@ -1343,7 +1339,7 @@ def _role_cmd_args(
     if isinstance(groups, list):
         groups = ",".join(groups)
     for group in groups.split(","):
-        sub_cmd = '{}; GRANT "{}" TO "{}"'.format(sub_cmd, group, name)
+        sub_cmd = f'{sub_cmd}; GRANT "{group}" TO "{name}"'
     return sub_cmd
 
 
@@ -1380,7 +1376,7 @@ def _role_create(
         log.info("%s '%s' already exists", typ_.capitalize(), name)
         return False
 
-    sub_cmd = 'CREATE ROLE "{}" WITH'.format(name)
+    sub_cmd = f'CREATE ROLE "{name}" WITH'
     sub_cmd = "{} {}".format(
         sub_cmd,
         _role_cmd_args(
@@ -1506,7 +1502,7 @@ def _role_update(
         log.info("%s '%s' could not be found", typ_.capitalize(), name)
         return False
 
-    sub_cmd = 'ALTER ROLE "{}" WITH'.format(name)
+    sub_cmd = f'ALTER ROLE "{name}" WITH'
     sub_cmd = "{} {}".format(
         sub_cmd,
         _role_cmd_args(
@@ -1613,7 +1609,7 @@ def _role_remove(
         return False
 
     # user exists, proceed
-    sub_cmd = 'DROP ROLE "{}"'.format(name)
+    sub_cmd = f'DROP ROLE "{name}"'
     _psql_prepare_and_run(
         ["-c", sub_cmd],
         runas=runas,
@@ -1995,14 +1991,14 @@ def create_extension(
         args = ["CREATE EXTENSION"]
         if if_not_exists:
             args.append("IF NOT EXISTS")
-        args.append('"{}"'.format(name))
+        args.append(f'"{name}"')
         sargs = []
         if schema:
-            sargs.append('SCHEMA "{}"'.format(schema))
+            sargs.append(f'SCHEMA "{schema}"')
         if ext_version:
-            sargs.append("VERSION {}".format(ext_version))
+            sargs.append(f"VERSION {ext_version}")
         if from_version:
-            sargs.append("FROM {}".format(from_version))
+            sargs.append(f"FROM {from_version}")
         if sargs:
             args.append("WITH")
             args.extend(sargs)
@@ -2011,13 +2007,9 @@ def create_extension(
     else:
         args = []
         if schema and _EXTENSION_TO_MOVE in mtdata:
-            args.append(
-                'ALTER EXTENSION "{}" SET SCHEMA "{}";'.format(name, schema)
-            )
+            args.append(f'ALTER EXTENSION "{name}" SET SCHEMA "{schema}";')
         if ext_version and _EXTENSION_TO_UPGRADE in mtdata:
-            args.append(
-                'ALTER EXTENSION "{}" UPDATE TO {};'.format(name, ext_version)
-            )
+            args.append(f'ALTER EXTENSION "{name}" UPDATE TO {ext_version};')
         cmd = " ".join(args).strip()
     if cmd:
         _psql_prepare_and_run(
@@ -2227,7 +2219,7 @@ def owner_to(
 
     sqlfile = tempfile.NamedTemporaryFile()
     sqlfile.write("begin;\n")
-    sqlfile.write('alter database "{}" owner to "{}";\n'.format(dbname, ownername))
+    sqlfile.write(f'alter database "{dbname}" owner to "{ownername}";\n')
 
     queries = (
         # schemas
@@ -2335,9 +2327,9 @@ def schema_create(
         log.info("'%s' already exists in '%s'", name, dbname)
         return False
 
-    sub_cmd = 'CREATE SCHEMA "{}"'.format(name)
+    sub_cmd = f'CREATE SCHEMA "{name}"'
     if owner is not None:
-        sub_cmd = '{} AUTHORIZATION "{}"'.format(sub_cmd, owner)
+        sub_cmd = f'{sub_cmd} AUTHORIZATION "{owner}"'
 
     ret = _psql_prepare_and_run(
         ["-c", sub_cmd],
@@ -2401,7 +2393,7 @@ def schema_remove(
         return False
 
     # schema exists, proceed
-    sub_cmd = 'DROP SCHEMA "{}"'.format(name)
+    sub_cmd = f'DROP SCHEMA "{name}"'
     _psql_prepare_and_run(
         ["-c", sub_cmd],
         runas=user,
@@ -2721,7 +2713,7 @@ def language_create(
         log.info("Language %s already exists in %s", name, maintenance_db)
         return False
 
-    query = "CREATE LANGUAGE {}".format(name)
+    query = f"CREATE LANGUAGE {name}"
 
     ret = _psql_prepare_and_run(
         ["-c", query],
@@ -2776,7 +2768,7 @@ def language_remove(
         log.info("Language %s does not exist in %s", name, maintenance_db)
         return False
 
-    query = "DROP LANGUAGE {}".format(name)
+    query = f"DROP LANGUAGE {name}"
 
     ret = _psql_prepare_and_run(
         ["-c", query],
@@ -3035,9 +3027,7 @@ def _validate_privileges(object_type, privs, privileges):
         _perms.append("ALL")
 
     if object_type not in _PRIVILEGES_OBJECTS:
-        raise SaltInvocationError(
-            "Invalid object_type: {} provided".format(object_type)
-        )
+        raise SaltInvocationError(f"Invalid object_type: {object_type} provided")
 
     if not set(privs).issubset(set(_perms)):
         raise SaltInvocationError(
@@ -3145,9 +3135,7 @@ def privileges_list(
     query = _make_privileges_list_query(name, object_type, prepend)
 
     if object_type not in _PRIVILEGES_OBJECTS:
-        raise SaltInvocationError(
-            "Invalid object_type: {} provided".format(object_type)
-        )
+        raise SaltInvocationError(f"Invalid object_type: {object_type} provided")
 
     rows = psql_query(
         query,
@@ -3439,15 +3427,15 @@ def privileges_grant(
     _grants = ",".join(_privs)
 
     if object_type in ["table", "sequence"]:
-        on_part = '{}."{}"'.format(prepend, object_name)
+        on_part = f'{prepend}."{object_name}"'
     elif object_type == "function":
-        on_part = "{}".format(object_name)
+        on_part = f"{object_name}"
     else:
-        on_part = '"{}"'.format(object_name)
 
+        on_part = f'"{object_name}"'
     if grant_option:
         if object_type == "group":
-            query = 'GRANT {} TO "{}" WITH ADMIN OPTION'.format(object_name, name)
+            query = f'GRANT {object_name} TO "{name}" WITH ADMIN OPTION'
         elif object_type in ("table", "sequence") and object_name.upper() == "ALL":
             query = 'GRANT {} ON ALL {}S IN SCHEMA {} TO "{}" WITH GRANT OPTION'.format(
                 _grants, object_type.upper(), prepend, name
@@ -3458,7 +3446,7 @@ def privileges_grant(
             )
     else:
        if object_type == "group":
-            query = 'GRANT {} TO "{}"'.format(object_name, name)
+            query = f'GRANT {object_name} TO "{name}"'
         elif object_type in ("table", "sequence") and object_name.upper() == "ALL":
             query = 'GRANT {} ON ALL {}S IN SCHEMA {} TO "{}"'.format(
                 _grants, object_type.upper(), prepend, name
@@ -3587,12 +3575,12 @@ def privileges_revoke(
     _grants = ",".join(_privs)
 
    if object_type in ["table", "sequence"]:
-        on_part = "{}.{}".format(prepend, object_name)
+        on_part = f"{prepend}.{object_name}"
     else:
         on_part = object_name
 
     if object_type == "group":
-        query = "REVOKE {} FROM {}".format(object_name, name)
+        query = f"REVOKE {object_name} FROM {name}"
     else:
         query = "REVOKE {} ON {} {} FROM {}".format(
             _grants, object_type.upper(), on_part, name
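One postgres hunk touches _md5_password, which computes PostgreSQL's legacy md5 auth verifier: the literal "md5" prefix plus the md5 digest of password concatenated with the role name. A standalone sketch using only hashlib (example values are illustrative):

    import hashlib

    def md5_password(role, password):
        # PostgreSQL legacy md5 verifier: "md5" + hex(md5(password || role))
        return "md5" + hashlib.md5(f"{password}{role}".encode()).hexdigest()  # nosec

    assert md5_password("postgres", "secret") == (
        "md5" + hashlib.md5(b"secretpostgres").hexdigest()
    )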
@@ -78,7 +78,7 @@ def module_report():
         if hasattr(__salt__, ref):
             ret["module_attrs"].append(ref)
         for func in __salt__[ref]:
-            full = "{}.{}".format(ref, func)
+            full = f"{ref}.{func}"
             if hasattr(getattr(__salt__, ref), func):
                 ret["function_attrs"].append(full)
             if func in __salt__[ref]:
@@ -426,7 +426,7 @@ def provider(module):
     """
     func = ""
     for key in __salt__:
-        if not key.startswith("{}.".format(module)):
+        if not key.startswith(f"{module}."):
             continue
         func = key
         break
@@ -692,7 +692,7 @@ def deprecation_warning():
     """
     # This warn should always stay in Salt.
     salt.utils.versions.warn_until(
-        "Oganesson",
+        3108,
         "This is a test deprecation warning by version.",
    )
    salt.utils.versions.warn_until_date(
@@ -41,7 +41,7 @@ def _deprecation_message(function):
     @wraps(function)
     def wrapped(*args, **kwargs):
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The 'vcenter' functionality in Salt has been deprecated and its "
             "functionality will be removed in version 3008 in favor of the "
             "saltext.vmware Salt Extension. "
@@ -308,7 +308,7 @@ def _deprecation_message(function):
     @wraps(function)
     def wrapped(*args, **kwargs):
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The 'vsphere' functionality in Salt has been deprecated and its "
             "functionality will be removed in version 3008 in favor of the "
             "saltext.vmware Salt Extension. "
@@ -356,7 +356,7 @@ def _get_proxy_connection_details():
     elif proxytype == "esxvm":
         details = __salt__["esxvm.get_details"]()
     else:
-        raise CommandExecutionError("'{}' proxy is not supported".format(proxytype))
+        raise CommandExecutionError(f"'{proxytype}' proxy is not supported")
     proxy_details = [
         details.get("vcenter") if "vcenter" in details else details.get("host"),
         details.get("username"),
@@ -784,7 +784,7 @@ def coredump_network_enable(
         enable_it = 1
     else:
         enable_it = 0
-    cmd = "system coredump network set -e {}".format(enable_it)
+    cmd = f"system coredump network set -e {enable_it}"
 
     ret = {}
     if esxi_hosts:
@@ -1663,7 +1663,7 @@ def upload_ssh_key(
     if certificate_verify is None:
         certificate_verify = True
 
-    url = "{}://{}:{}/host/ssh_root_authorized_keys".format(protocol, host, port)
+    url = f"{protocol}://{host}:{port}/host/ssh_root_authorized_keys"
     ret = {}
     result = None
     try:
@@ -1734,7 +1734,7 @@ def get_ssh_key(
     if certificate_verify is None:
         certificate_verify = True
 
-    url = "{}://{}:{}/host/ssh_root_authorized_keys".format(protocol, host, port)
+    url = f"{protocol}://{host}:{port}/host/ssh_root_authorized_keys"
     ret = {}
     try:
         result = salt.utils.http.query(
@@ -1998,11 +1998,7 @@ def get_service_policy(
         # If we don't have a valid service, return. The service will be invalid for all hosts.
         if service_name not in valid_services:
             ret.update(
-                {
-                    host_name: {
-                        "Error": "{} is not a valid service name.".format(service_name)
-                    }
-                }
+                {host_name: {"Error": f"{service_name} is not a valid service name."}}
            )
            return ret
 
@@ -2030,7 +2026,7 @@ def get_service_policy(
 
         # If we made it this far, something else has gone wrong.
         if ret.get(host_name) is None:
-            msg = "'vsphere.get_service_policy' failed for host {}.".format(host_name)
+            msg = f"'vsphere.get_service_policy' failed for host {host_name}."
             log.debug(msg)
             ret.update({host_name: {"Error": msg}})
 
@@ -2141,11 +2137,7 @@ def get_service_running(
         # If we don't have a valid service, return. The service will be invalid for all hosts.
         if service_name not in valid_services:
             ret.update(
-                {
-                    host_name: {
-                        "Error": "{} is not a valid service name.".format(service_name)
-                    }
-                }
+                {host_name: {"Error": f"{service_name} is not a valid service name."}}
            )
            return ret
 
@@ -2173,7 +2165,7 @@ def get_service_running(
 
         # If we made it this far, something else has gone wrong.
         if ret.get(host_name) is None:
-            msg = "'vsphere.get_service_running' failed for host {}.".format(host_name)
+            msg = f"'vsphere.get_service_running' failed for host {host_name}."
             log.debug(msg)
             ret.update({host_name: {"Error": msg}})
 
@@ -2330,7 +2322,7 @@ def get_vsan_enabled(
 
         # We must have a VSAN Config in place get information about VSAN state.
         if vsan_config is None:
-            msg = "VSAN System Config Manager is unset for host '{}'.".format(host_name)
+            msg = f"VSAN System Config Manager is unset for host '{host_name}'."
             log.debug(msg)
             ret.update({host_name: {"Error": msg}})
         else:
@@ -3237,7 +3229,7 @@ def set_ntp_config(
         try:
             date_time_manager.UpdateDateTimeConfig(config=date_config)
         except vim.fault.HostConfigFault as err:
-            msg = "vsphere.ntp_configure_servers failed: {}".format(err)
+            msg = f"vsphere.ntp_configure_servers failed: {err}"
             log.debug(msg)
             ret.update({host_name: {"Error": msg}})
             continue
@@ -3356,11 +3348,7 @@ def service_start(
         # If we don't have a valid service, return. The service will be invalid for all hosts.
         if service_name not in valid_services:
             ret.update(
-                {
-                    host_name: {
-                        "Error": "{} is not a valid service name.".format(service_name)
-                    }
-                }
+                {host_name: {"Error": f"{service_name} is not a valid service name."}}
            )
            return ret
 
@@ -3499,11 +3487,7 @@ def service_stop(
         # If we don't have a valid service, return. The service will be invalid for all hosts.
         if service_name not in valid_services:
             ret.update(
-                {
-                    host_name: {
-                        "Error": "{} is not a valid service name.".format(service_name)
-                    }
-                }
+                {host_name: {"Error": f"{service_name} is not a valid service name."}}
            )
            return ret
 
@@ -3515,7 +3499,7 @@ def service_stop(
         try:
             service_manager.StopService(id=temp_service_name)
         except vim.fault.HostConfigFault as err:
-            msg = "'vsphere.service_stop' failed for host {}: {}".format(host_name, err)
+            msg = f"'vsphere.service_stop' failed for host {host_name}: {err}"
             log.debug(msg)
             ret.update({host_name: {"Error": msg}})
             continue
@@ -3640,11 +3624,7 @@ def service_restart(
         # If we don't have a valid service, return. The service will be invalid for all hosts.
         if service_name not in valid_services:
             ret.update(
-                {
-                    host_name: {
-                        "Error": "{} is not a valid service name.".format(service_name)
-                    }
-                }
+                {host_name: {"Error": f"{service_name} is not a valid service name."}}
            )
            return ret
 
@@ -3781,11 +3761,7 @@ def set_service_policy(
         # If we don't have a valid service, return. The service will be invalid for all hosts.
         if service_name not in valid_services:
             ret.update(
-                {
-                    host_name: {
-                        "Error": "{} is not a valid service name.".format(service_name)
-                    }
-                }
+                {host_name: {"Error": f"{service_name} is not a valid service name."}}
            )
            return ret
 
@@ -3812,7 +3788,7 @@ def set_service_policy(
                     id=service_key, policy=service_policy
                 )
             except vim.fault.NotFound:
-                msg = "The service name '{}' was not found.".format(service_name)
+                msg = f"The service name '{service_name}' was not found."
                 log.debug(msg)
                 ret.update({host_name: {"Error": msg}})
                 continue
@@ -4057,7 +4033,7 @@ def vmotion_disable(
         try:
             vmotion_system.DeselectVnic()
         except vim.fault.HostConfigFault as err:
-            msg = "vsphere.vmotion_disable failed: {}".format(err)
+            msg = f"vsphere.vmotion_disable failed: {err}"
             log.debug(msg)
             ret.update({host_name: {"Error": msg, "VMotion Disabled": False}})
             continue
@@ -4145,7 +4121,7 @@ def vmotion_enable(
         try:
             vmotion_system.SelectVnic(device)
         except vim.fault.HostConfigFault as err:
-            msg = "vsphere.vmotion_disable failed: {}".format(err)
+            msg = f"vsphere.vmotion_disable failed: {err}"
             log.debug(msg)
             ret.update({host_name: {"Error": msg, "VMotion Enabled": False}})
             continue
@@ -4872,7 +4848,7 @@ def create_dvs(dvs_dict, dvs_name, service_instance=None):
         dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs_name])
         if not dvs_refs:
             raise VMwareObjectRetrievalError(
-                "DVS '{}' wasn't found in datacenter '{}'".format(dvs_name, datacenter)
+                f"DVS '{dvs_name}' wasn't found in datacenter '{datacenter}'"
             )
         dvs_ref = dvs_refs[0]
         salt.utils.vmware.set_dvs_network_resource_management_enabled(
@@ -4925,7 +4901,7 @@ def update_dvs(dvs_dict, dvs, service_instance=None):
     dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs])
     if not dvs_refs:
         raise VMwareObjectRetrievalError(
-            "DVS '{}' wasn't found in datacenter '{}'".format(dvs, datacenter)
+            f"DVS '{dvs}' wasn't found in datacenter '{datacenter}'"
         )
     dvs_ref = dvs_refs[0]
     # Build the config spec from the input
@@ -5156,7 +5132,7 @@ def list_dvportgroups(dvs=None, portgroup_names=None, service_instance=None):
     if dvs:
         dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs])
         if not dvs_refs:
-            raise VMwareObjectRetrievalError("DVS '{}' was not retrieved".format(dvs))
+            raise VMwareObjectRetrievalError(f"DVS '{dvs}' was not retrieved")
         dvs_ref = dvs_refs[0]
     get_all_portgroups = True if not portgroup_names else False
     for pg_ref in salt.utils.vmware.get_dvportgroups(
@@ -5199,7 +5175,7 @@ def list_uplink_dvportgroup(dvs, service_instance=None):
     dc_ref = salt.utils.vmware.get_datacenter(service_instance, datacenter)
     dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs])
     if not dvs_refs:
-        raise VMwareObjectRetrievalError("DVS '{}' was not retrieved".format(dvs))
+        raise VMwareObjectRetrievalError(f"DVS '{dvs}' was not retrieved")
     uplink_pg_ref = salt.utils.vmware.get_uplink_dvportgroup(dvs_refs[0])
     return _get_dvportgroup_dict(uplink_pg_ref)
 
@@ -5434,7 +5410,7 @@ def create_dvportgroup(portgroup_dict, portgroup_name, dvs, service_instance=None):
     dc_ref = salt.utils.vmware.get_datacenter(service_instance, datacenter)
     dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs])
     if not dvs_refs:
-        raise VMwareObjectRetrievalError("DVS '{}' was not retrieved".format(dvs))
+        raise VMwareObjectRetrievalError(f"DVS '{dvs}' was not retrieved")
     # Make the name of the dvportgroup consistent with the parameter
     portgroup_dict["name"] = portgroup_name
     spec = vim.DVPortgroupConfigSpec()
@@ -5490,14 +5466,12 @@ def update_dvportgroup(portgroup_dict, portgroup, dvs, service_instance=True):
     dc_ref = salt.utils.vmware.get_datacenter(service_instance, datacenter)
     dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs])
     if not dvs_refs:
-        raise VMwareObjectRetrievalError("DVS '{}' was not retrieved".format(dvs))
+        raise VMwareObjectRetrievalError(f"DVS '{dvs}' was not retrieved")
     pg_refs = salt.utils.vmware.get_dvportgroups(
         dvs_refs[0], portgroup_names=[portgroup]
     )
     if not pg_refs:
-        raise VMwareObjectRetrievalError(
-            "Portgroup '{}' was not retrieved".format(portgroup)
-        )
+        raise VMwareObjectRetrievalError(f"Portgroup '{portgroup}' was not retrieved")
     pg_props = salt.utils.vmware.get_properties_of_managed_object(
         pg_refs[0], ["config"]
     )
@@ -5556,14 +5530,12 @@ def remove_dvportgroup(portgroup, dvs, service_instance=None):
     dc_ref = salt.utils.vmware.get_datacenter(service_instance, datacenter)
     dvs_refs = salt.utils.vmware.get_dvss(dc_ref, dvs_names=[dvs])
     if not dvs_refs:
-        raise VMwareObjectRetrievalError("DVS '{}' was not retrieved".format(dvs))
+        raise VMwareObjectRetrievalError(f"DVS '{dvs}' was not retrieved")
     pg_refs = salt.utils.vmware.get_dvportgroups(
         dvs_refs[0], portgroup_names=[portgroup]
     )
     if not pg_refs:
-        raise VMwareObjectRetrievalError(
-            "Portgroup '{}' was not retrieved".format(portgroup)
-        )
+        raise VMwareObjectRetrievalError(f"Portgroup '{portgroup}' was not retrieved")
     salt.utils.vmware.remove_dvportgroup(pg_refs[0])
     return True
 
@@ -5834,7 +5806,7 @@ def update_storage_policy(policy, policy_dict, service_instance=None):
     profile_manager = salt.utils.pbm.get_profile_manager(service_instance)
     policies = salt.utils.pbm.get_storage_policies(profile_manager, [policy])
     if not policies:
-        raise VMwareObjectRetrievalError("Policy '{}' was not found".format(policy))
+        raise VMwareObjectRetrievalError(f"Policy '{policy}' was not found")
     policy_ref = policies[0]
     policy_update_spec = pbm.profile.CapabilityBasedProfileUpdateSpec()
     log.trace("Setting policy values in policy_update_spec")
@@ -5877,9 +5849,7 @@ def list_default_storage_policy_of_datastore(datastore, service_instance=None):
         service_instance, target_ref, datastore_names=[datastore]
     )
     if not ds_refs:
-        raise VMwareObjectRetrievalError(
-            "Datastore '{}' was not found".format(datastore)
-        )
+        raise VMwareObjectRetrievalError(f"Datastore '{datastore}' was not found")
     profile_manager = salt.utils.pbm.get_profile_manager(service_instance)
     policy = salt.utils.pbm.get_default_storage_policy_of_datastore(
         profile_manager, ds_refs[0]
@@ -5921,7 +5891,7 @@ def assign_default_storage_policy_to_datastore(
     # Find policy
     policies = salt.utils.pbm.get_storage_policies(profile_manager, [policy])
     if not policies:
-        raise VMwareObjectRetrievalError("Policy '{}' was not found".format(policy))
+        raise VMwareObjectRetrievalError(f"Policy '{policy}' was not found")
     policy_ref = policies[0]
     # Find datastore
     target_ref = _get_proxy_target(service_instance)
@@ -5929,9 +5899,7 @@
         service_instance, target_ref, datastore_names=[datastore]
     )
     if not ds_refs:
-        raise VMwareObjectRetrievalError(
-            "Datastore '{}' was not found".format(datastore)
-        )
+        raise VMwareObjectRetrievalError(f"Datastore '{datastore}' was not found")
     ds_ref = ds_refs[0]
     salt.utils.pbm.assign_default_storage_policy_to_datastore(
         profile_manager, policy_ref, ds_ref
@@ -6657,7 +6625,7 @@ def create_vmfs_datastore(
     disks = salt.utils.vmware.get_disks(host_ref, disk_ids=[disk_id])
     if not disks:
         raise VMwareObjectRetrievalError(
-            "Disk '{}' was not found in host '{}'".format(disk_id, hostname)
+            f"Disk '{disk_id}' was not found in host '{hostname}'"
         )
     ds_ref = salt.utils.vmware.create_vmfs_datastore(
         host_ref, datastore_name, disks[0], vmfs_major_version
@@ -6696,9 +6664,7 @@ def rename_datastore(datastore_name, new_datastore_name, service_instance=None):
         service_instance, target, datastore_names=[datastore_name]
     )
    if not datastores:
-        raise VMwareObjectRetrievalError(
-            "Datastore '{}' was not found".format(datastore_name)
-        )
+        raise VMwareObjectRetrievalError(f"Datastore '{datastore_name}' was not found")
     ds = datastores[0]
     salt.utils.vmware.rename_datastore(ds, new_datastore_name)
     return True
@@ -6731,12 +6697,10 @@ def remove_datastore(datastore, service_instance=None):
         service_instance, reference=target, datastore_names=[datastore]
     )
     if not datastores:
-        raise VMwareObjectRetrievalError(
-            "Datastore '{}' was not found".format(datastore)
-        )
+        raise VMwareObjectRetrievalError(f"Datastore '{datastore}' was not found")
     if len(datastores) > 1:
         raise VMwareObjectRetrievalError(
-            "Multiple datastores '{}' were found".format(datastore)
+            f"Multiple datastores '{datastore}' were found"
         )
     salt.utils.vmware.remove_datastore(service_instance, datastores[0])
     return True
@@ -6970,9 +6934,7 @@ def assign_license(
     if safety_checks:
         licenses = salt.utils.vmware.get_licenses(service_instance)
         if not [l for l in licenses if l.licenseKey == license_key]:
-            raise VMwareObjectRetrievalError(
-                "License '{}' wasn't found".format(license_name)
-            )
+            raise VMwareObjectRetrievalError(f"License '{license_name}' wasn't found")
     salt.utils.vmware.assign_license(
         service_instance,
         license_key,
@@ -7348,7 +7310,7 @@ def create_diskgroup(
     for id in disk_ids:
         if not [d for d in disks if d.canonicalName == id]:
             raise VMwareObjectRetrievalError(
-                "No disk with id '{}' was found in ESXi host '{}'".format(id, hostname)
+                f"No disk with id '{id}' was found in ESXi host '{hostname}'"
             )
     cache_disk = [d for d in disks if d.canonicalName == cache_disk_id][0]
     capacity_disks = [d for d in disks if d.canonicalName in capacity_disk_ids]
@@ -7597,7 +7559,7 @@ def get_host_cache(service_instance=None):
         return {
             "enabled": True,
             "datastore": {"name": hci.key.name},
-            "swap_size": "{}MiB".format(hci.swapSize),
+            "swap_size": f"{hci.swapSize}MiB",
         }
 
 
@@ -7661,7 +7623,7 @@ def configure_host_cache(
     )
     if not ds_refs:
         raise VMwareObjectRetrievalError(
-            "Datastore '{}' was not found on host '{}'".format(datastore, hostname)
+            f"Datastore '{datastore}' was not found on host '{hostname}'"
         )
     ds_ref = ds_refs[0]
     salt.utils.vmware.configure_host_cache(host_ref, ds_ref, swap_size_MiB)
@@ -7960,7 +7922,7 @@ def _set_syslog_config_helper(
     """
    Helper function for set_syslog_config that sets the config and populates the return dictionary.
    """
-    cmd = "system syslog config set --{} {}".format(syslog_config, config_value)
+    cmd = f"system syslog config set --{syslog_config} {config_value}"
     ret_dict = {}
 
     valid_resets = [
@@ -7975,7 +7937,7 @@ def _set_syslog_config_helper(
        ret_dict.update(
            {
                "success": False,
-                "message": "'{}' is not a valid config variable.".format(syslog_config),
+                "message": f"'{syslog_config}' is not a valid config variable.",
            }
        )
        return ret_dict
@@ -8218,14 +8180,14 @@ def add_host_to_dvs(
     dvs = salt.utils.vmware._get_dvs(service_instance, dvs_name)
     if not dvs:
         ret["message"].append(
-            "No Distributed Virtual Switch found with name {}".format(dvs_name)
+            f"No Distributed Virtual Switch found with name {dvs_name}"
         )
         ret["success"] = False
 
     target_portgroup = salt.utils.vmware._get_dvs_portgroup(dvs, target_portgroup_name)
     if not target_portgroup:
         ret["message"].append(
-            "No target portgroup found with name {}".format(target_portgroup_name)
+            f"No target portgroup found with name {target_portgroup_name}"
         )
         ret["success"] = False
 
@@ -8234,7 +8196,7 @@ def add_host_to_dvs(
     )
     if not uplink_portgroup:
         ret["message"].append(
-            "No uplink portgroup found with name {}".format(uplink_portgroup_name)
+            f"No uplink portgroup found with name {uplink_portgroup_name}"
        )
        ret["success"] = False
 
@@ -8245,7 +8207,7 @@ def add_host_to_dvs(
     try:
         host_names = _check_hosts(service_instance, host, host_names)
     except CommandExecutionError as e:
-        ret["message"] = "Error retrieving hosts: {}".format(e.msg)
+        ret["message"] = f"Error retrieving hosts: {e.msg}"
         return ret
 
     for host_name in host_names:
@@ -8272,9 +8234,7 @@ def add_host_to_dvs(
         p_nics = salt.utils.vmware._get_pnics(host_ref)
         p_nic = [x for x in p_nics if x.device == vmnic_name]
         if not p_nic:
-            ret[host_name].update(
-                {"message": "Physical nic {} not found".format(vmknic_name)}
-            )
+            ret[host_name].update({"message": f"Physical nic {vmknic_name} not found"})
             ret["success"] = False
             continue
 
@@ -8282,9 +8242,7 @@ def add_host_to_dvs(
         v_nic = [x for x in v_nics if x.device == vmknic_name]
 
        if not v_nic:
-            ret[host_name].update(
-                {"message": "Virtual nic {} not found".format(vmnic_name)}
-            )
+            ret[host_name].update({"message": f"Virtual nic {vmnic_name} not found"})
            ret["success"] = False
            continue
 
@@ -8377,7 +8335,7 @@ def add_host_to_dvs(
         except Exception as e:  # pylint: disable=broad-except
             if hasattr(e, "msg"):
                 ret[host_name].update(
-                    {"message": "Failed to migrate adapters ({})".format(e.msg)}
+                    {"message": f"Failed to migrate adapters ({e.msg})"}
                 )
                 continue
             else:
@@ -8876,7 +8834,7 @@ def _get_scsi_controller_key(bus_number, scsi_ctrls):
     ]
     if not keys:
         raise salt.exceptions.VMwareVmCreationError(
-            "SCSI controller number {} doesn't exist".format(bus_number)
+            f"SCSI controller number {bus_number} doesn't exist"
        )
     return keys[0]
 
@@ -9055,7 +9013,7 @@ def _create_network_backing(network_name, switch_type, parent_ref):
         )
         if not networks:
             raise salt.exceptions.VMwareObjectRetrievalError(
-                "The network '{}' could not be retrieved.".format(network_name)
+                f"The network '{network_name}' could not be retrieved."
            )
         network_ref = networks[0]
         backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
@@ -9067,7 +9025,7 @@ def _create_network_backing(network_name, switch_type, parent_ref):
         )
         if not networks:
             raise salt.exceptions.VMwareObjectRetrievalError(
-                "The port group '{}' could not be retrieved.".format(network_name)
+                f"The port group '{network_name}' could not be retrieved."
            )
        network_ref = networks[0]
        dvs_port_connection = vim.dvs.PortConnection(
@@ -9833,7 +9791,7 @@ def _get_device_by_key(devices, key):
         return device_keys[0]
     else:
         raise salt.exceptions.VMwareObjectNotFoundError(
-            "Virtual machine device with unique key {} does not exist".format(key)
+            f"Virtual machine device with unique key {key} does not exist"
        )
 
 
@@ -9853,7 +9811,7 @@ def _get_device_by_label(devices, label):
     return device_labels[0]
    else:
        raise salt.exceptions.VMwareObjectNotFoundError(
-            "Virtual machine device with label {} does not exist".format(label)
+            f"Virtual machine device with label {label} does not exist"
        )
 
 
@@ -11207,7 +11165,7 @@ def create_vm(
         )[0]
         if not datastore_object:
             raise salt.exceptions.ArgumentValueError(
-                "Specified datastore: '{}' does not exist.".format(datastore)
+                f"Specified datastore: '{datastore}' does not exist."
            )
         try:
             ds_summary = salt.utils.vmware.get_properties_of_managed_object(
@@ -11218,7 +11176,7 @@ def create_vm(
                     "The vmPathName should be the datastore "
                     "name if the datastore type is vsan"
                 )
-                config_spec.files.vmPathName = "[{}]".format(datastore)
+                config_spec.files.vmPathName = f"[{datastore}]"
             else:
                 config_spec.files.vmPathName = "[{0}] {1}/{1}.vmx".format(
                     datastore, vm_name
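The last context line above keeps "[{0}] {1}/{1}.vmx".format(datastore, vm_name) even though its sibling was converted; automated converters often skip numbered placeholders, but a repeated field has a direct f-string equivalent. A quick check with illustrative values:

    datastore, vm_name = "ds1", "vm01"
    old = "[{0}] {1}/{1}.vmx".format(datastore, vm_name)
    new = f"[{datastore}] {vm_name}/{vm_name}.vmx"
    assert old == new == "[ds1] vm01/vm01.vmx"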
@@ -96,7 +96,7 @@ def __virtual__():
         return (False, "Superseded, using x509_v2")
     if HAS_M2:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "The x509 modules are deprecated. Please migrate to the replacement "
             "modules (x509_v2). They are the default from Salt 3008 (Argon) onwards.",
        )
@@ -188,7 +188,7 @@ def _parse_openssl_req(csr_filename):
     """
     if not salt.utils.path.which("openssl"):
         raise salt.exceptions.SaltInvocationError("openssl binary not found in path")
-    cmd = "openssl req -text -noout -in {}".format(csr_filename)
+    cmd = f"openssl req -text -noout -in {csr_filename}"
 
     output = __salt__["cmd.run_stdout"](cmd)
 
@@ -231,7 +231,7 @@ def _parse_openssl_crl(crl_filename):
     """
     if not salt.utils.path.which("openssl"):
         raise salt.exceptions.SaltInvocationError("openssl binary not found in path")
-    cmd = "openssl crl -text -noout -in {}".format(crl_filename)
+    cmd = f"openssl crl -text -noout -in {crl_filename}"
 
     output = __salt__["cmd.run_stdout"](cmd)
 
@@ -316,7 +316,7 @@ def _dec2hex(decval):
     """
     Converts decimal values to nicely formatted hex strings
     """
-    return _pretty_hex("{:X}".format(decval))
+    return _pretty_hex(f"{decval:X}")
 
 
 def _isfile(path):
@@ -505,7 +505,7 @@ def get_pem_entry(text, pem_type=None):
             pem_temp = pem_temp[pem_temp.index("-") :]
     text = "\n".join(pem_fixed)
 
-    errmsg = "PEM text not valid:\n{}".format(text)
+    errmsg = f"PEM text not valid:\n{text}"
     if pem_type:
         errmsg = "PEM does not contain a single entry of type {}:\n{}".format(
             pem_type, text
@@ -824,7 +824,7 @@ def write_pem(text, path, overwrite=True, pem_type=None):
         _fp.write(salt.utils.stringutils.to_str(text))
         if pem_type and pem_type == "CERTIFICATE" and _dhparams:
             _fp.write(salt.utils.stringutils.to_str(_dhparams))
-    return "PEM written to {}".format(path)
+    return f"PEM written to {path}"
 
 
 def create_private_key(
@@ -1130,7 +1130,7 @@ def get_signing_policy(signing_policy_name):
     """
     signing_policy = _get_signing_policy(signing_policy_name)
     if not signing_policy:
-        return "Signing policy {} does not exist.".format(signing_policy_name)
+        return f"Signing policy {signing_policy_name} does not exist."
     if isinstance(signing_policy, list):
         dict_ = {}
         for item in signing_policy:
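The _dec2hex change shows that format specs carry over unchanged into f-strings: "{:X}".format(v) becomes f"{v:X}" with the same uppercase-hex output. A one-line check with an illustrative value:

    decval = 48879
    assert "{:X}".format(decval) == f"{decval:X}" == "BEEF"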
@@ -458,7 +458,7 @@ def create_certificate(
     # Deprecation checks vs the old x509 module
     if "algorithm" in kwargs:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "`algorithm` has been renamed to `digest`. Please update your code.",
        )
        kwargs["digest"] = kwargs.pop("algorithm")
@@ -473,7 +473,7 @@ def create_certificate(
     if "days_valid" not in kwargs and "not_after" not in kwargs:
         try:
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The default value for `days_valid` will change to 30. Please adapt your code accordingly.",
            )
            kwargs["days_valid"] = 365
@@ -910,7 +910,7 @@ def create_crl(
     if days_valid is None:
         try:
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The default value for `days_valid` will change to 7. Please adapt your code accordingly.",
            )
            days_valid = 100
@@ -922,14 +922,14 @@ def create_crl(
         parsed = {}
         if len(rev) == 1 and isinstance(rev[next(iter(rev))], list):
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "Revoked certificates should be specified as a simple list of dicts.",
            )
            for val in rev[next(iter(rev))]:
                parsed.update(val)
        if "reason" in (parsed or rev):
            salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                "The `reason` parameter for revoked certificates should be specified in extensions:CRLReason.",
            )
            salt.utils.dictupdate.set_dict_key_value(
@@ -1079,7 +1079,7 @@ def create_csr(
     # Deprecation checks vs the old x509 module
     if "algorithm" in kwargs:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "`algorithm` has been renamed to `digest`. Please update your code.",
        )
        digest = kwargs.pop("algorithm")
@@ -1225,7 +1225,7 @@ def create_private_key(
     # Deprecation checks vs the old x509 module
     if "bits" in kwargs:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "`bits` has been renamed to `keysize`. Please update your code.",
        )
        keysize = kwargs.pop("bits")
@@ -1601,7 +1601,7 @@ def get_signing_policy(signing_policy, ca_server=None):
         for long_name in long_names:
             if long_name in policy:
                 salt.utils.versions.warn_until(
-                    "Potassium",
+                    3009,
                     f"Found {long_name} in {signing_policy}. Please migrate to the short name: {name}",
                )
                policy[name] = policy.pop(long_name)
@@ -1611,7 +1611,7 @@ def get_signing_policy(signing_policy, ca_server=None):
             for long_name in long_names:
                 if long_name in policy:
                     salt.utils.versions.warn_until(
-                        "Potassium",
+                        3009,
                         f"Found {long_name} in {signing_policy}. Please migrate to the short name: {extname}",
                    )
                    policy[extname] = policy.pop(long_name)
@@ -258,11 +258,7 @@ def _versionlock_pkg(grains=None):
     elif _yum() == "tdnf":
         raise SaltInvocationError("Cannot proceed, no versionlock for tdnf")
     else:
-        return (
-            "yum-versionlock"
-            if int(grains.get("osmajorrelease")) == 5
-            else "yum-plugin-versionlock"
-        )
+        return "yum-plugin-versionlock"
 
 
 def _check_versionlock():
@@ -45,8 +45,8 @@ log = logging.getLogger(__name__)
 
 HAS_ZYPP = False
 ZYPP_HOME = "/etc/zypp"
-LOCKS = "{}/locks".format(ZYPP_HOME)
-REPOS = "{}/repos.d".format(ZYPP_HOME)
+LOCKS = f"{ZYPP_HOME}/locks"
+REPOS = f"{ZYPP_HOME}/repos.d"
 DEFAULT_PRIORITY = 99
 PKG_ARCH_SEPARATOR = "."
 
@@ -372,9 +372,7 @@ class _Zypper:
                 self.TAG_RELEASED,
             )
             if self.error_msg and not self.__no_raise and not self.__ignore_repo_failure:
-                raise CommandExecutionError(
-                    "Zypper command failure: {}".format(self.error_msg)
-                )
+                raise CommandExecutionError(f"Zypper command failure: {self.error_msg}")
 
         return (
             self._is_xml_mode()
@@ -483,9 +481,7 @@ class Wildcard:
             "se", "-xv", self.name
         ).getElementsByTagName("solvable")
         if not solvables:
-            raise CommandExecutionError(
-                "No packages found matching '{}'".format(self.name)
-            )
+            raise CommandExecutionError(f"No packages found matching '{self.name}'")
 
         return sorted(
             {
@@ -520,7 +516,7 @@ class Wildcard:
             self._op = version.replace(exact_version, "") or None
             if self._op and self._op not in self.Z_OP:
                 raise CommandExecutionError(
-                    'Zypper do not supports operator "{}".'.format(self._op)
+                    f'Zypper do not supports operator "{self._op}".'
                 )
             self.version = exact_version
 
@@ -921,7 +917,7 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
 
     # Results can be different if a different root or a different
     # inclusion types are passed
-    contextkey = "pkg.list_pkgs_{}_{}".format(root, includes)
+    contextkey = f"pkg.list_pkgs_{root}_{includes}"
 
     if contextkey in __context__ and kwargs.get("use_context", True):
         return _list_pkgs_from_context(versions_as_list, contextkey, attr)
@@ -989,7 +985,7 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
         else:
             elements = []
         for element in elements:
-            extended_name = "{}:{}".format(include, element)
+            extended_name = f"{include}:{element}"
             info = info_available(extended_name, refresh=False, root=root)
             _ret[extended_name] = [
                 {
@@ -1230,7 +1226,7 @@ def del_repo(repo, root=None):
                 "message": msg[0].childNodes[0].nodeValue,
             }
 
-    raise CommandExecutionError("Repository '{}' not found.".format(repo))
+    raise CommandExecutionError(f"Repository '{repo}' not found.")
 
 
 def mod_repo(repo, **kwargs):
@@ -1317,7 +1313,7 @@ def mod_repo(repo, **kwargs):
 
         if new_url == base_url:
             raise CommandExecutionError(
-                "Repository '{}' already exists as '{}'.".format(repo, alias)
+                f"Repository '{repo}' already exists as '{alias}'."
            )
 
     # Add new repo
@@ -1473,7 +1469,7 @@ def install(
     ignore_repo_failure=False,
     no_recommends=False,
     root=None,
-    **kwargs
+    **kwargs,
 ):
     """
     .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
@@ -1640,7 +1636,7 @@ def install(
                 prefix, verstr = salt.utils.pkg.split_comparison(version_num)
                 if not prefix:
                     prefix = "="
-                target = "{}{}{}".format(param, prefix, verstr)
+                target = f"{param}{prefix}{verstr}"
                 log.debug("targeting package: %s", target)
                 targets.append(target)
     elif pkg_type == "advisory":
@@ -1648,9 +1644,7 @@ def install(
         cur_patches = list_patches(root=root)
         for advisory_id in pkg_params:
             if advisory_id not in cur_patches:
-                raise CommandExecutionError(
-                    'Advisory id "{}" not found'.format(advisory_id)
-                )
+                raise CommandExecutionError(f'Advisory id "{advisory_id}" not found')
             else:
                 # If we add here the `patch:` prefix, the
                 # `_find_types` helper will take the patches into the
@@ -1703,7 +1697,7 @@ def install(
|
|||
# if the name of the package is already prefixed with 'patch:' we
|
||||
# can avoid listing them in the `advisory_ids` field.
|
||||
if pkg_type == "advisory":
|
||||
targets = ["patch:{}".format(t) for t in targets]
|
||||
targets = [f"patch:{t}" for t in targets]
|
||||
|
||||
# Split the targets into batches of 500 packages each, so that
|
||||
# the maximal length of the command line is not broken
|
||||
|
@ -1767,7 +1761,7 @@ def upgrade(
|
|||
no_recommends=False,
|
||||
root=None,
|
||||
diff_attr=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
): # pylint: disable=unused-argument
|
||||
"""
|
||||
.. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
|
||||
|
@ -2186,7 +2180,7 @@ def list_holds(pattern=None, full=True, root=None, **kwargs):
|
|||
)
|
||||
)
|
||||
|
||||
ptrn_re = re.compile(r"{}-\S+".format(pattern)) if pattern else None
|
||||
ptrn_re = re.compile(rf"{pattern}-\S+") if pattern else None
|
||||
for pkg_name, pkg_editions in inst_pkgs.items():
|
||||
for pkg_info in pkg_editions:
|
||||
pkg_ret = (
|
||||
|
@ -2328,14 +2322,12 @@ def unhold(name=None, pkgs=None, root=None, **kwargs):
|
|||
target
|
||||
)
|
||||
else:
|
||||
removed.append(
|
||||
target if not lock_ver else "{}={}".format(target, lock_ver)
|
||||
)
|
||||
removed.append(target if not lock_ver else f"{target}={lock_ver}")
|
||||
ret[target]["changes"]["new"] = ""
|
||||
ret[target]["changes"]["old"] = "hold"
|
||||
ret[target]["comment"] = "Package {} is no longer held.".format(target)
|
||||
ret[target]["comment"] = f"Package {target} is no longer held."
|
||||
else:
|
||||
ret[target]["comment"] = "Package {} was already unheld.".format(target)
|
||||
ret[target]["comment"] = f"Package {target} was already unheld."
|
||||
|
||||
if removed:
|
||||
__zypper__(root=root).call("rl", *removed)
|
||||
|
@ -2387,10 +2379,10 @@ def hold(name=None, pkgs=None, root=None, **kwargs):
|
|||
(target, version) = next(iter(target.items()))
|
||||
ret[target] = {"name": target, "changes": {}, "result": True, "comment": ""}
|
||||
if not locks.get(target):
|
||||
added.append(target if not version else "{}={}".format(target, version))
|
||||
added.append(target if not version else f"{target}={version}")
|
||||
ret[target]["changes"]["new"] = "hold"
|
||||
ret[target]["changes"]["old"] = ""
|
||||
ret[target]["comment"] = "Package {} is now being held.".format(target)
|
||||
ret[target]["comment"] = f"Package {target} is now being held."
|
||||
else:
|
||||
ret[target]["comment"] = "Package {} is already set to be held.".format(
|
||||
target
|
||||
|
@ -2740,7 +2732,7 @@ def search(criteria, refresh=False, **kwargs):
|
|||
.getElementsByTagName("solvable")
|
||||
)
|
||||
if not solvables:
|
||||
raise CommandExecutionError("No packages found matching '{}'".format(criteria))
|
||||
raise CommandExecutionError(f"No packages found matching '{criteria}'")
|
||||
|
||||
out = {}
|
||||
for solvable in solvables:
|
||||
|
|
|
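Most of the hunks above are mechanical `str.format()` → f-string conversions. The two forms are equivalent at runtime; f-strings are evaluated where they appear and are generally faster and easier to read. A small demonstration of both spellings, plus the `rf`-string used for the `list_holds` regex:

```python
import re

ZYPP_HOME = "/etc/zypp"

# Before: explicit .format() call
locks_old = "{}/locks".format(ZYPP_HOME)

# After: f-string, interpolated at the point of definition
locks_new = f"{ZYPP_HOME}/locks"

assert locks_old == locks_new == "/etc/zypp/locks"

# An rf-string combines raw and formatted semantics: backslashes stay
# literal for the regex engine, while braces still interpolate.
pattern = "vim"
ptrn_re = re.compile(rf"{pattern}-\S+")
assert ptrn_re.match("vim-9.0.1-lp155.1")
```
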
@@ -441,7 +441,7 @@ def _decrypt_ciphertext(cipher):
         )
     else:
         salt.utils.versions.warn_until(
-            "Chlorine",
+            3007,
             "After the Chlorine release of Salt, gpg_decrypt_must_succeed will default to True.",
         )
         return cipher

@@ -107,7 +107,7 @@ def _deprecation_message(function):
     @wraps(function)
     def wrapped(*args, **kwargs):
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The 'esxcluster' functionality in Salt has been deprecated and its "
             "functionality will be removed in version 3008 in favor of the "
             "saltext.vmware Salt Extension. "
@@ -140,7 +140,7 @@ def _get_vsan_datastore(si, cluster_name):
 
     if not vsan_datastores:
         raise salt.exceptions.VMwareObjectRetrievalError(
-            "No vSAN datastores where retrieved for cluster '{}'".format(cluster_name)
+            f"No vSAN datastores where retrieved for cluster '{cluster_name}'"
         )
     return vsan_datastores[0]
 
@@ -201,9 +201,7 @@ def cluster_configured(name, cluster_config):
             __salt__["esxcluster.get_details"]()["datacenter"],
         )
     else:
-        raise salt.exceptions.CommandExecutionError(
-            "Unsupported proxy {}".format(proxy_type)
-        )
+        raise salt.exceptions.CommandExecutionError(f"Unsupported proxy {proxy_type}")
     log.info(
         "Running %s for cluster '%s' in datacenter '%s'",
         name,
@@ -288,13 +286,11 @@ def cluster_configured(name, cluster_config):
         changes_required = True
         changes_str = ""
         if diff.diffs:
-            changes_str = "{}{}".format(changes_str, diff.changes_str)
+            changes_str = f"{changes_str}{diff.changes_str}"
         if ldiff and ldiff.diffs:
             changes_str = "{}\nha:\n options:\n{}".format(
                 changes_str,
-                "\n".join(
-                    [" {}".format(l) for l in ldiff.changes_str2.split("\n")]
-                ),
+                "\n".join([f" {l}" for l in ldiff.changes_str2.split("\n")]),
             )
         # Apply the changes
         if __opts__["test"]:
@@ -355,7 +351,7 @@ def vsan_datastore_configured(name, datastore_name):
         __salt__["esxcluster.get_details"]()["cluster"],
         __salt__["esxcluster.get_details"]()["datacenter"],
     )
-    display_name = "{}/{}".format(datacenter_name, cluster_name)
+    display_name = f"{datacenter_name}/{cluster_name}"
     log.info("Running vsan_datastore_configured for '%s'", display_name)
     ret = {"name": name, "changes": {}, "result": None, "comment": "Default"}
     comments = []
@@ -394,9 +390,7 @@ def vsan_datastore_configured(name, datastore_name):
                 new_datastore_name=datastore_name,
                 service_instance=si,
             )
-            comments.append(
-                "Renamed vSAN datastore to '{}'.".format(datastore_name)
-            )
+            comments.append(f"Renamed vSAN datastore to '{datastore_name}'.")
             changes = {
                 "vsan_datastore": {
                     "new": {"name": datastore_name},
@@ -446,7 +440,7 @@ def licenses_configured(name, licenses=None):
         __salt__["esxcluster.get_details"]()["cluster"],
         __salt__["esxcluster.get_details"]()["datacenter"],
     )
-    display_name = "{}/{}".format(datacenter_name, cluster_name)
+    display_name = f"{datacenter_name}/{cluster_name}"
     log.info("Running licenses configured for '%s'", display_name)
     log.trace("licenses = %s", licenses)
     entity = {"type": "cluster", "datacenter": datacenter_name, "cluster": cluster_name}
@@ -496,7 +490,7 @@ def licenses_configured(name, licenses=None):
                     log.error(comments[-1])
                     has_errors = True
                     continue
-                comments.append("Added license '{}'.".format(license_name))
+                comments.append(f"Added license '{license_name}'.")
                 log.info(comments[-1])
             else:
                 # License exists let's check if it's assigned to the cluster

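The `_deprecation_message` decorator repeated across these VMware state modules wraps every public function with a warn-then-call shim. A generic sketch of that decorator shape (message text abbreviated, `warnings` standing in for Salt's helper):

```python
import warnings
from functools import wraps


def _deprecation_message(function):
    """Warn on every call, then run the wrapped state function."""

    @wraps(function)
    def wrapped(*args, **kwargs):
        warnings.warn(
            "This functionality is deprecated in favor of the "
            "saltext.vmware Salt Extension.",
            DeprecationWarning,
        )
        return function(*args, **kwargs)

    return wrapped


@_deprecation_message
def cluster_configured(name, cluster_config):
    return {"name": name, "result": True}


print(cluster_configured("c1", {}))  # warns, then returns normally
```
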
@@ -81,7 +81,7 @@ def _deprecation_message(function):
     @wraps(function)
     def wrapped(*args, **kwargs):
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The 'esxdatacenter' functionality in Salt has been deprecated and its "
             "functionality will be removed in version 3008 in favor of the "
             "saltext.vmware Salt Extension. "
@@ -129,16 +129,16 @@ def datacenter_configured(name):
         )
         if not dcs:
             if __opts__["test"]:
-                comments.append("State will create datacenter '{}'.".format(dc_name))
+                comments.append(f"State will create datacenter '{dc_name}'.")
             else:
                 log.debug("Creating datacenter '%s'", dc_name)
                 __salt__["vsphere.create_datacenter"](dc_name, si)
-                comments.append("Created datacenter '{}'.".format(dc_name))
+                comments.append(f"Created datacenter '{dc_name}'.")
             log.info(comments[-1])
             ret["changes"].update({"new": {"name": dc_name}})
         else:
             comments.append(
-                "Datacenter '{}' already exists. Nothing to be done.".format(dc_name)
+                f"Datacenter '{dc_name}' already exists. Nothing to be done."
             )
             log.info(comments[-1])
         __salt__["vsphere.disconnect"](si)

@@ -160,7 +160,7 @@ def _deprecation_message(function):
     @wraps(function)
     def wrapped(*args, **kwargs):
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The 'esxi' functionality in Salt has been deprecated and its "
             "functionality will be removed in version 3008 in favor of the "
             "saltext.vmware Salt Extension. "
@@ -226,7 +226,7 @@ def coredump_configured(name, enabled, dump_ip, host_vnic="vmk0", dump_port=6500
     current_config = __salt__[esxi_cmd]("get_coredump_network_config").get(host)
     error = current_config.get("Error")
     if error:
-        ret["comment"] = "Error: {}".format(error)
+        ret["comment"] = f"Error: {error}"
         return ret
 
     current_config = current_config.get("Coredump Config")
@@ -242,7 +242,7 @@ def coredump_configured(name, enabled, dump_ip, host_vnic="vmk0", dump_port=6500
             ).get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
 
     # Allow users to disable core dump, but then return since
@@ -295,7 +295,7 @@ def coredump_configured(name, enabled, dump_ip, host_vnic="vmk0", dump_port=6500
             msg = response.get("stderr")
             if not msg:
                 msg = response.get("stdout")
-            ret["comment"] = "Error: {}".format(msg)
+            ret["comment"] = f"Error: {msg}"
             return ret
 
     ret["result"] = True
@@ -354,7 +354,7 @@ def password_present(name, password):
         __salt__[esxi_cmd]("update_host_password", new_password=password)
     except CommandExecutionError as err:
         ret["result"] = False
-        ret["comment"] = "Error: {}".format(err)
+        ret["comment"] = f"Error: {err}"
         return ret
 
     return ret
@@ -427,7 +427,7 @@ def ntp_configured(
     ntp_running = __salt__[esxi_cmd]("get_service_running", service_name=ntpd).get(host)
     error = ntp_running.get("Error")
     if error:
-        ret["comment"] = "Error: {}".format(error)
+        ret["comment"] = f"Error: {error}"
         return ret
     ntp_running = ntp_running.get(ntpd)
 
@@ -440,7 +440,7 @@ def ntp_configured(
             ).get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
             # Set changes dictionary for ntp_servers
             ret["changes"].update({"ntp_servers": {"old": ntp_config, "new": ntp_servers}})
@@ -456,7 +456,7 @@ def ntp_configured(
                 )
                 error = response.get("Error")
                 if error:
-                    ret["comment"] = "Error: {}".format(error)
+                    ret["comment"] = f"Error: {error}"
                     return ret
             # Stop ntpd if service_running=False
             else:
@@ -465,7 +465,7 @@ def ntp_configured(
                 )
                 error = response.get("Error")
                 if error:
-                    ret["comment"] = "Error: {}".format(error)
+                    ret["comment"] = f"Error: {error}"
                     return ret
             ret["changes"].update(
                 {"service_running": {"old": ntp_running, "new": service_running}}
@@ -478,7 +478,7 @@ def ntp_configured(
         ).get(host)
         error = current_service_policy.get("Error")
         if error:
-            ret["comment"] = "Error: {}".format(error)
+            ret["comment"] = f"Error: {error}"
             return ret
         current_service_policy = current_service_policy.get(ntpd)
 
@@ -492,7 +492,7 @@ def ntp_configured(
             ).get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
             ret["changes"].update(
                 {
@@ -510,7 +510,7 @@ def ntp_configured(
             response = __salt__[esxi_cmd]("update_host_datetime").get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
             ret["changes"].update(
                 {"update_datetime": {"old": "", "new": "Host datetime was updated."}}
@@ -525,7 +525,7 @@ def ntp_configured(
             )
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
             ret["changes"].update(
                 {"service_restart": {"old": "", "new": "NTP Daemon Restarted."}}
@@ -587,14 +587,14 @@ def vmotion_configured(name, enabled, device="vmk0"):
             response = __salt__[esxi_cmd]("vmotion_enable", device=device).get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
         # Disable VMotion if enabled=False
         else:
             response = __salt__[esxi_cmd]("vmotion_disable").get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
         ret["changes"].update(
             {"enabled": {"old": current_vmotion_enabled, "new": enabled}}
@@ -647,7 +647,7 @@ def vsan_configured(name, enabled, add_disks_to_vsan=False):
     current_vsan_enabled = __salt__[esxi_cmd]("get_vsan_enabled").get(host)
     error = current_vsan_enabled.get("Error")
     if error:
-        ret["comment"] = "Error: {}".format(error)
+        ret["comment"] = f"Error: {error}"
         return ret
     current_vsan_enabled = current_vsan_enabled.get("VSAN Enabled")
 
@@ -660,14 +660,14 @@ def vsan_configured(name, enabled, add_disks_to_vsan=False):
             response = __salt__[esxi_cmd]("vsan_enable").get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
         # Disable VSAN if enabled=False
         else:
             response = __salt__[esxi_cmd]("vsan_disable").get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
         ret["changes"].update(
             {"enabled": {"old": current_vsan_enabled, "new": enabled}}
@@ -678,7 +678,7 @@ def vsan_configured(name, enabled, add_disks_to_vsan=False):
         current_eligible_disks = __salt__[esxi_cmd]("get_vsan_eligible_disks").get(host)
         error = current_eligible_disks.get("Error")
         if error:
-            ret["comment"] = "Error: {}".format(error)
+            ret["comment"] = f"Error: {error}"
             return ret
 
         disks = current_eligible_disks.get("Eligible")
@@ -688,7 +688,7 @@ def vsan_configured(name, enabled, add_disks_to_vsan=False):
             response = __salt__[esxi_cmd]("vsan_add_disks").get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
 
             ret["changes"].update({"add_disks_to_vsan": {"old": "", "new": disks}})
@@ -779,7 +779,7 @@ def ssh_configured(
     ssh_running = __salt__[esxi_cmd]("get_service_running", service_name=ssh).get(host)
     error = ssh_running.get("Error")
     if error:
-        ret["comment"] = "Error: {}".format(error)
+        ret["comment"] = f"Error: {error}"
         return ret
     ssh_running = ssh_running.get(ssh)
 
@@ -792,14 +792,14 @@ def ssh_configured(
             enable = __salt__[esxi_cmd]("service_start", service_name=ssh).get(host)
             error = enable.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
         # Disable SSH if service_running=False
         else:
             disable = __salt__[esxi_cmd]("service_stop", service_name=ssh).get(host)
             error = disable.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
 
         ret["changes"].update(
@@ -815,7 +815,7 @@ def ssh_configured(
         )
         error = current_ssh_key.get("Error")
         if error:
-            ret["comment"] = "Error: {}".format(error)
+            ret["comment"] = f"Error: {error}"
             return ret
         current_ssh_key = current_ssh_key.get("key")
         if current_ssh_key:
@@ -854,7 +854,7 @@ def ssh_configured(
                 )
                 error = response.get("Error")
                 if error:
-                    ret["comment"] = "Error: {}".format(error)
+                    ret["comment"] = f"Error: {error}"
                     return ret
                 ret["changes"].update(
                     {
@@ -872,7 +872,7 @@ def ssh_configured(
         ).get(host)
         error = current_service_policy.get("Error")
         if error:
-            ret["comment"] = "Error: {}".format(error)
+            ret["comment"] = f"Error: {error}"
             return ret
         current_service_policy = current_service_policy.get(ssh)
 
@@ -886,7 +886,7 @@ def ssh_configured(
             ).get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
             ret["changes"].update(
                 {
@@ -904,7 +904,7 @@ def ssh_configured(
             response = __salt__[esxi_cmd]("service_restart", service_name=ssh).get(host)
             error = response.get("Error")
             if error:
-                ret["comment"] = "Error: {}".format(error)
+                ret["comment"] = f"Error: {error}"
                 return ret
             ret["changes"].update(
                 {"service_restart": {"old": "", "new": "SSH service restarted."}}
@@ -1008,7 +1008,7 @@ def syslog_configured(
                     "There was an error resetting a syslog config '{}'."
                     "Please check debug logs.".format(val)
                 )
-                ret["comment"] = "Error: {}".format(msg)
+                ret["comment"] = f"Error: {msg}"
                 return ret
 
             ret["changes"].update(
@@ -1018,7 +1018,7 @@ def syslog_configured(
     current_firewall = __salt__[esxi_cmd]("get_firewall_status").get(host)
     error = current_firewall.get("Error")
     if error:
-        ret["comment"] = "Error: {}".format(error)
+        ret["comment"] = f"Error: {error}"
        return ret
 
     current_firewall = current_firewall.get("rulesets").get("syslog")
@@ -1033,7 +1033,7 @@ def syslog_configured(
        if enabled.get("retcode") != 0:
            err = enabled.get("stderr")
            out = enabled.get("stdout")
-            ret["comment"] = "Error: {}".format(err if err else out)
+            ret["comment"] = f"Error: {err if err else out}"
            return ret
 
        ret["changes"].update({"firewall": {"old": current_firewall, "new": firewall}})
@@ -1045,7 +1045,7 @@ def syslog_configured(
         try:
             lookup_key = _lookup_syslog_config(key)
         except KeyError:
-            ret["comment"] = "'{}' is not a valid config variable.".format(key)
+            ret["comment"] = f"'{key}' is not a valid config variable."
             return ret
 
         current_val = current_syslog_config[lookup_key]
@@ -1158,7 +1158,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
         host_disks = __salt__["vsphere.list_disks"](service_instance=si)
         if not host_disks:
             raise VMwareObjectRetrievalError(
-                "No disks retrieved from host '{}'".format(hostname)
+                f"No disks retrieved from host '{hostname}'"
             )
         scsi_addr_to_disk_map = {d["scsi_address"]: d for d in host_disks}
         log.trace("scsi_addr_to_disk_map = %s", scsi_addr_to_disk_map)
@@ -1199,14 +1199,12 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                     bad_scsi_addrs.append(scsi_addr)
                     continue
                 capacity_disk_ids.append(scsi_addr_to_disk_map[scsi_addr]["id"])
-                capacity_disk_displays.append(
-                    "{} (id:{})".format(scsi_addr, capacity_disk_ids[-1])
-                )
+                capacity_disk_displays.append(f"{scsi_addr} (id:{capacity_disk_ids[-1]})")
             if bad_scsi_addrs:
                 comments.append(
                     "Error in diskgroup #{}: capacity disks with scsi addresses {} "
                     "were not found.".format(
-                        idx, ", ".join(["'{}'".format(a) for a in bad_scsi_addrs])
+                        idx, ", ".join([f"'{a}'" for a in bad_scsi_addrs])
                     )
                 )
                 log.error(comments[-1])
@@ -1227,9 +1225,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                         name,
                         idx,
                         cache_disk_display,
-                        ", ".join(
-                            ["'{}'".format(a) for a in capacity_disk_displays]
-                        ),
+                        ", ".join([f"'{a}'" for a in capacity_disk_displays]),
                     )
                 )
             else:
@@ -1244,9 +1240,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                         "{}".format(
                             idx,
                             cache_disk_display,
-                            ", ".join(
-                                ["'{}'".format(a) for a in capacity_disk_displays]
-                            ),
+                            ", ".join([f"'{a}'" for a in capacity_disk_displays]),
                         )
                     )
                     log.info(comments[-1])
@@ -1259,7 +1253,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                     name,
                     idx,
                     cache_disk_display,
-                    ", ".join(["'{}'".format(a) for a in capacity_disk_displays]),
+                    ", ".join([f"'{a}'" for a in capacity_disk_displays]),
                 )
             )
             log.info(comments[-1])
@@ -1273,12 +1267,12 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                 service_instance=si,
             )
         except VMwareSaltError as err:
-            comments.append("Error creating disk group #{}: {}.".format(idx, err))
+            comments.append(f"Error creating disk group #{idx}: {err}.")
             log.error(comments[-1])
             errors = True
             continue
 
-        comments.append("Created disk group #'{}'.".format(idx))
+        comments.append(f"Created disk group #'{idx}'.")
         log.info(comments[-1])
        diskgroup_changes[str(idx)] = {
            "new": {"cache": cache_disk_display, "capacity": capacity_disk_displays}
@@ -1311,9 +1305,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                     d["scsi_address"] for d in host_disks if d["id"] == disk_id
                 ][0]
                 added_capacity_disk_ids.append(disk_id)
-                added_capacity_disk_displays.append(
-                    "{} (id:{})".format(disk_scsi_addr, disk_id)
-                )
+                added_capacity_disk_displays.append(f"{disk_scsi_addr} (id:{disk_id})")
         for disk_id in existing_diskgroup["capacity_disks"]:
             if disk_id not in capacity_disk_ids:
                 disk_scsi_addr = [
@@ -1321,7 +1313,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                 ][0]
                 removed_capacity_disk_ids.append(disk_id)
                 removed_capacity_disk_displays.append(
-                    "{} (id:{})".format(disk_scsi_addr, disk_id)
+                    f"{disk_scsi_addr} (id:{disk_id})"
                 )
 
         log.debug(
@@ -1339,9 +1331,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
                 "Error removing capacity disk(s) {} from disk group #{}; "
                 "operation is not supported."
                 "".format(
-                    ", ".join(
-                        ["'{}'".format(id) for id in removed_capacity_disk_displays]
-                    ),
+                    ", ".join([f"'{id}'" for id in removed_capacity_disk_displays]),
                     idx,
                 )
             )
@@ -1354,7 +1344,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
 
         # Building a string representation of the capacity disks
         # that need to be added
-        s = ", ".join(["'{}'".format(id) for id in added_capacity_disk_displays])
+        s = ", ".join([f"'{id}'" for id in added_capacity_disk_displays])
         if __opts__["test"]:
             comments.append(
                 "State {} will add capacity disk(s) {} to disk group #{}.".format(
@@ -1381,7 +1371,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
             errors = True
             continue
 
-        com = "Added capacity disk(s) {} to disk group #{}".format(s, idx)
+        com = f"Added capacity disk(s) {s} to disk group #{idx}"
         log.info(com)
         comments.append(com)
         diskgroup_changes[str(idx)] = {
@@ -1398,7 +1388,7 @@ def diskgroups_configured(name, diskgroups, erase_disks=False):
             continue
 
         # No capacity needs to be added
-        s = "Disk group #{} is correctly configured. Nothing to be done.".format(idx)
+        s = f"Disk group #{idx} is correctly configured. Nothing to be done."
         log.info(s)
         comments.append(s)
     __salt__["vsphere.disconnect"](si)
@@ -1662,7 +1652,7 @@ def host_cache_configured(
                     backing_disk["id"],
                     ", ".join(
                         [
-                            "'{}'".format(disk)
+                            f"'{disk}'"
                             for disk in existing_datastores[0]["backing_disk_ids"]
                         ]
                    ),
@@ -1710,8 +1700,8 @@ def host_cache_configured(
             changes.update(
                 {
                     "swap_size": {
-                        "old": "{}GiB".format(existing_swap_size_MiB / 1024),
-                        "new": "{}GiB".format(swap_size_MiB / 1024),
+                        "old": f"{existing_swap_size_MiB / 1024}GiB",
+                        "new": f"{swap_size_MiB / 1024}GiB",
                     }
                 }
             )
@@ -1746,7 +1736,7 @@ def host_cache_configured(
                 swap_size_MiB=swap_size_MiB,
                 service_instance=si,
             )
-            comments.append("Host cache configured on host '{}'.".format(hostname))
+            comments.append(f"Host cache configured on host '{hostname}'.")
         else:
             comments.append(
                 "Host cache on host '{}' is already correctly "
@@ -1766,7 +1756,7 @@ def host_cache_configured(
         ret.update(
             {
                 "result": False if not __opts__["test"] else None,
-                "comment": "{}.".format(err),
+                "comment": f"{err}.",
             }
         )
         return ret

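Nearly every hunk in this state file is the same three-line guard — fetch a response, pull its `"Error"` key, early-return with the message — now spelled with an f-string. A hypothetical helper (not part of this change; the real states keep the check inline) makes the shape of the repeated pattern explicit:

```python
def _fail_on_error(ret, response):
    """Hypothetical helper mirroring the guard repeated in these hunks.

    Returns True when the caller should bail out with `ret` as-is.
    """
    error = response.get("Error")
    if error:
        ret["comment"] = f"Error: {error}"
        return True
    return False


# Inline equivalent, as it appears (post-change) in the state functions:
ret = {"name": "esxi-host", "changes": {}, "result": None, "comment": ""}
response = {"Error": "connection refused"}  # assumed example payload
error = response.get("Error")
if error:
    ret["comment"] = f"Error: {error}"
```
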
@@ -247,7 +247,7 @@ def _deprecation_message(function):
     @wraps(function)
     def wrapped(*args, **kwargs):
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The 'esxvm' functionality in Salt has been deprecated and its "
             "functionality will be removed in version 3008 in favor of the "
             "saltext.vmware Salt Extension. "
@@ -467,7 +467,7 @@ def vm_updated(
         {
             "result": True,
             "changes": {},
-            "comment": "Virtual machine {} is already up to date".format(vm_name),
+            "comment": f"Virtual machine {vm_name} is already up to date",
         }
     )
     return result
@@ -532,7 +532,7 @@ def vm_updated(
         "name": name,
         "result": True,
         "changes": changes,
-        "comment": "Virtual machine {} was updated successfully".format(vm_name),
+        "comment": f"Virtual machine {vm_name} was updated successfully",
     }
 
     return result
@@ -565,7 +565,7 @@ def vm_created(
     result = {"name": name, "result": None, "changes": {}, "comment": ""}
 
     if __opts__["test"]:
-        result["comment"] = "Virtual machine {} will be created".format(vm_name)
+        result["comment"] = f"Virtual machine {vm_name} will be created"
         return result
 
     service_instance = __salt__["vsphere.get_service_instance_via_proxy"]()
@@ -615,7 +615,7 @@ def vm_created(
         "name": name,
         "result": True,
         "changes": changes,
-        "comment": "Virtual machine {} created successfully".format(vm_name),
+        "comment": f"Virtual machine {vm_name} created successfully",
     }
 
     return result
@@ -629,7 +629,7 @@ def vm_registered(vm_name, datacenter, placement, vm_file, power_on=False):
     """
     result = {"name": vm_name, "result": None, "changes": {}, "comment": ""}
 
-    vmx_path = "{}{}".format(vm_file.folderPath, vm_file.file[0].path)
+    vmx_path = f"{vm_file.folderPath}{vm_file.file[0].path}"
     log.trace("Registering virtual machine with vmx file: %s", vmx_path)
     service_instance = __salt__["vsphere.get_service_instance_via_proxy"]()
     try:
@@ -665,7 +665,7 @@ def vm_registered(vm_name, datacenter, placement, vm_file, power_on=False):
         {
             "result": True,
             "changes": {"name": vm_name, "power_on": power_on},
-            "comment": "Virtual machine {} registered successfully".format(vm_name),
+            "comment": f"Virtual machine {vm_name} registered successfully",
         }
     )
 

@@ -1791,9 +1791,11 @@ def symlink(
 
     if __salt__["file.is_link"](name):
         # The link exists, verify that it matches the target
-        if os.path.normpath(__salt__["file.readlink"](name)) == os.path.normpath(
+        if os.path.normpath(__salt__["file.readlink"](name)) != os.path.normpath(
             target
         ):
+            __salt__["file.remove"](name)
+        else:
             if _check_symlink_ownership(name, user, group, win_owner):
                 # The link looks good!
                 if salt.utils.platform.is_windows():

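This is the one behavioral change in the section: the comparison flips from `==` to `!=`, so a link pointing at the wrong target is now removed (and recreated further down) instead of the mismatch falling through to the ownership check. A condensed sketch of the new control flow, with stand-in callables for the `__salt__` functions:

```python
import os


def ensure_symlink(name, target, readlink, remove, check_ownership):
    """Condensed sketch of the reordered logic; helpers are stand-ins."""
    if os.path.normpath(readlink(name)) != os.path.normpath(target):
        # Wrong target: delete the link so it can be recreated later.
        remove(name)
    else:
        # Correct target: only ownership may still need fixing.
        if check_ownership(name):
            pass  # the link looks good
```
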
@@ -463,7 +463,7 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
         pre = __salt__["pkg.get_repo"](repo=repo, **kwargs)
     except CommandExecutionError as exc:
         ret["result"] = False
-        ret["comment"] = "Failed to examine repo '{}': {}".format(name, exc)
+        ret["comment"] = f"Failed to examine repo '{name}': {exc}"
         return ret
 
     # This is because of how apt-sources works. This pushes distro logic
@@ -545,7 +545,7 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
             break
     else:
         ret["result"] = True
-        ret["comment"] = "Package repo '{}' already configured".format(name)
+        ret["comment"] = f"Package repo '{name}' already configured"
         return ret
 
     if __opts__["test"]:
@@ -580,7 +580,7 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
         # This is another way to pass information back from the mod_repo
         # function.
         ret["result"] = False
-        ret["comment"] = "Failed to configure repo '{}': {}".format(name, exc)
+        ret["comment"] = f"Failed to configure repo '{name}': {exc}"
         return ret
 
     try:
@@ -596,10 +596,10 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
             ret["changes"] = {"repo": repo}
 
         ret["result"] = True
-        ret["comment"] = "Configured package repo '{}'".format(name)
+        ret["comment"] = f"Configured package repo '{name}'"
     except Exception as exc:  # pylint: disable=broad-except
         ret["result"] = False
-        ret["comment"] = "Failed to confirm config of repo '{}': {}".format(name, exc)
+        ret["comment"] = f"Failed to confirm config of repo '{name}': {exc}"
 
     # Clear cache of available packages, if present, since changes to the
     # repositories may change the packages that are available.
@@ -699,11 +699,11 @@ def absent(name, **kwargs):
         repo = __salt__["pkg.get_repo"](stripname, **kwargs)
     except CommandExecutionError as exc:
         ret["result"] = False
-        ret["comment"] = "Failed to configure repo '{}': {}".format(name, exc)
+        ret["comment"] = f"Failed to configure repo '{name}': {exc}"
         return ret
 
     if not repo:
-        ret["comment"] = "Package repo {} is absent".format(name)
+        ret["comment"] = f"Package repo {name} is absent"
         ret["result"] = True
         return ret
 
@@ -726,7 +726,7 @@ def absent(name, **kwargs):
     repos = __salt__["pkg.list_repos"]()
     if stripname not in repos:
         ret["changes"]["repo"] = name
-        ret["comment"] = "Removed repo {}".format(name)
+        ret["comment"] = f"Removed repo {name}"
 
         if not remove_key:
             ret["result"] = True
@@ -735,13 +735,13 @@ def absent(name, **kwargs):
                 removed_keyid = __salt__["pkg.del_repo_key"](stripname, **kwargs)
             except (CommandExecutionError, SaltInvocationError) as exc:
                 ret["result"] = False
-                ret["comment"] += ", but failed to remove key: {}".format(exc)
+                ret["comment"] += f", but failed to remove key: {exc}"
             else:
                 ret["result"] = True
                 ret["changes"]["keyid"] = removed_keyid
-                ret["comment"] += ", and keyid {}".format(removed_keyid)
+                ret["comment"] += f", and keyid {removed_keyid}"
     else:
         ret["result"] = False
-        ret["comment"] = "Failed to remove repo {}".format(name)
+        ret["comment"] = f"Failed to remove repo {name}"
 
     return ret

@@ -663,7 +663,7 @@ def wait_for_event(name, id_list, event_id="id", timeout=300, node="master"):
     ret = {"name": name, "changes": {}, "comment": "", "result": False}
 
     if __opts__.get("test"):
-        ret["comment"] = "Orchestration would wait for event '{}'".format(name)
+        ret["comment"] = f"Orchestration would wait for event '{name}'"
         ret["result"] = None
         return ret
 
@@ -793,7 +793,7 @@ def runner(name, **kwargs):
             "name": name,
             "result": None,
             "changes": {},
-            "comment": "Runner function '{}' would be executed.".format(name),
+            "comment": f"Runner function '{name}' would be executed.",
         }
         return ret
 
@@ -815,7 +815,7 @@ def runner(name, **kwargs):
     if features.get("enable_deprecated_orchestration_flag", False):
         ret["__orchestration__"] = True
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The __orchestration__ return flag will be removed in Salt Argon. "
             "For more information see https://github.com/saltstack/salt/pull/59917.",
         )
@@ -918,7 +918,7 @@ def parallel_runners(name, runners, **kwargs):  # pylint: disable=unused-argument
             "result": False,
             "success": False,
             "changes": {},
-            "comment": "One of the runners raised an exception: {}".format(exc),
+            "comment": f"One of the runners raised an exception: {exc}",
         }
     # We bundle the results of the runners with the IDs of the runners so that
     # we can easily identify which output belongs to which runner. At the same
@@ -997,7 +997,7 @@ def parallel_runners(name, runners, **kwargs):  # pylint: disable=unused-argument
         comment = "All runner functions executed successfully."
     else:
         if len(failed_runners) == 1:
-            comment = "Runner {} failed.".format(failed_runners[0])
+            comment = f"Runner {failed_runners[0]} failed."
         else:
             comment = "Runners {} failed.".format(", ".join(failed_runners))
     changes = {"ret": {runner_id: out for runner_id, out in outputs.items()}}
@@ -1043,7 +1043,7 @@ def wheel(name, **kwargs):
     if __opts__.get("test", False):
         ret["result"] = (None,)
         ret["changes"] = {}
-        ret["comment"] = "Wheel function '{}' would be executed.".format(name)
+        ret["comment"] = f"Wheel function '{name}' would be executed."
         return ret
 
     out = __salt__["saltutil.wheel"](
@@ -1064,7 +1064,7 @@ def wheel(name, **kwargs):
     if features.get("enable_deprecated_orchestration_flag", False):
         ret["__orchestration__"] = True
         salt.utils.versions.warn_until(
-            "Argon",
+            3008,
             "The __orchestration__ return flag will be removed in Salt Argon. "
             "For more information see https://github.com/saltstack/salt/pull/59917.",
         )

@@ -209,7 +209,7 @@ def __virtual__():
         return (False, "Superseded, using x509_v2")
     if "x509.get_pem_entry" in __salt__:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "The x509 modules are deprecated. Please migrate to the replacement "
             "modules (x509_v2). They are the default from Salt 3008 (Argon) onwards.",
         )
@@ -287,7 +287,7 @@ def private_key_managed(
     new=False,
     overwrite=False,
     verbose=True,
-    **kwargs
+    **kwargs,
 ):
     """
     Manage a private key's existence.
@@ -391,7 +391,7 @@ def csr_managed(name, **kwargs):
     try:
         old = __salt__["x509.read_csr"](name)
     except salt.exceptions.SaltInvocationError:
-        old = "{} is not a valid csr.".format(name)
+        old = f"{name} is not a valid csr."
 
     file_args, kwargs = _get_file_args(name, **kwargs)
     file_args["contents"] = __salt__["x509.create_csr"](text=True, **kwargs)
@@ -518,7 +518,7 @@ def _certificate_is_valid(name, days_remaining, append_certs, **cert_spec):
     If False, also provide a message explaining why.
     """
     if not os.path.isfile(name):
-        return False, "{} does not exist".format(name), {}
+        return False, f"{name} does not exist", {}
 
     try:
         cert_info = __salt__["x509.read_certificate"](certificate=name)
@@ -570,7 +570,7 @@ def _certificate_is_valid(name, days_remaining, append_certs, **cert_spec):
 
         return True, "", cert_info
     except salt.exceptions.SaltInvocationError as e:
-        return False, "{} is not a valid certificate: {}".format(name, str(e)), {}
+        return False, f"{name} is not a valid certificate: {str(e)}", {}
 
 
 def _certificate_file_managed(ret, file_args):
@@ -699,7 +699,7 @@ def certificate_managed(name, days_remaining=90, append_certs=None, **kwargs):
         ret = _certificate_file_managed(ret, file_args)
 
         ret["result"] = None
-        ret["comment"] = "Certificate {} will be created".format(name)
+        ret["comment"] = f"Certificate {name} will be created"
         ret["changes"]["Status"] = {
             "Old": invalid_reason,
             "New": "Certificate will be valid and up to date",
@@ -764,7 +764,7 @@ def crl_managed(
     digest="",
     days_remaining=30,
     include_expired=False,
-    **kwargs
+    **kwargs,
 ):
     """
     Manage a Certificate Revocation List
@@ -846,9 +846,9 @@ def crl_managed(
             if days_remaining == 0:
                 days_remaining = current_days_remaining - 1
         except salt.exceptions.SaltInvocationError:
-            current = "{} is not a valid CRL.".format(name)
+            current = f"{name} is not a valid CRL."
     else:
-        current = "{} does not exist.".format(name)
+        current = f"{name} does not exist."
 
     new_crl = __salt__["x509.create_crl"](
         text=True,

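The `**kwargs` → `**kwargs,` hunks here (and in zypperpkg and salt/utils/cloud.py below) add a trailing comma after the final parameter. This is valid Python 3 syntax in both signatures and call sites, and it keeps future diffs to one line when another parameter is appended; Python 2 rejected a comma after `**kwargs`, which is presumably why these were left bare. A minimal illustration with an assumed function name:

```python
def private_key_sketch(
    name,
    new=False,
    overwrite=False,
    verbose=True,
    **kwargs,  # trailing comma after **kwargs is valid in Python 3
):
    return name, kwargs


# Adding another keyword parameter later only touches one line of the diff.
print(private_key_sketch("/etc/pki/my.key", cipher="aes_256_cbc"))
```
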
@@ -390,7 +390,7 @@ def certificate_managed(
     if days_valid is None and not_after is None:
         try:
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The default value for `days_valid` will change to 30. Please adapt your code accordingly.",
             )
             days_valid = 365
@@ -400,7 +400,7 @@ def certificate_managed(
     if days_remaining is None:
         try:
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The default value for `days_remaining` will change to 7. Please adapt your code accordingly.",
             )
             days_remaining = 90
@@ -409,7 +409,7 @@ def certificate_managed(
 
     if "algorithm" in kwargs:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "`algorithm` has been renamed to `digest`. Please update your code.",
         )
         digest = kwargs.pop("algorithm")
@@ -782,7 +782,7 @@ def crl_managed(
     if days_valid is None:
         try:
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The default value for `days_valid` will change to 7. Please adapt your code accordingly.",
             )
             days_valid = 100
@@ -792,7 +792,7 @@ def crl_managed(
     if days_remaining is None:
         try:
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The default value for `days_remaining` will change to 3. Please adapt your code accordingly.",
             )
             days_remaining = 30
@@ -804,14 +804,14 @@ def crl_managed(
         parsed = {}
         if len(rev) == 1 and isinstance(rev[next(iter(rev))], list):
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "Revoked certificates should be specified as a simple list of dicts.",
             )
             for val in rev[next(iter(rev))]:
                 parsed.update(val)
         if "reason" in (parsed or rev):
             salt.utils.versions.warn_until(
-                "Potassium",
+                3009,
                 "The `reason` parameter for revoked certificates should be specified in extensions:CRLReason.",
             )
             salt.utils.dictupdate.set_dict_key_value(
@@ -1056,7 +1056,7 @@ def csr_managed(
     # Deprecation checks vs the old x509 module
     if "algorithm" in kwargs:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "`algorithm` has been renamed to `digest`. Please update your code.",
         )
         digest = kwargs.pop("algorithm")
@@ -1323,7 +1323,7 @@ def private_key_managed(
     # Deprecation checks vs the old x509 module
     if "bits" in kwargs:
         salt.utils.versions.warn_until(
-            "Potassium",
+            3009,
             "`bits` has been renamed to `keysize`. Please update your code.",
         )
         keysize = kwargs.pop("bits")

@@ -25,7 +25,7 @@ class ReqChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.ReqChannel.factory(opts, **kwargs)
@@ -41,7 +41,7 @@ class PushChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.PushChannel.factory(opts, **kwargs)
@@ -57,7 +57,7 @@ class PullChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.PullChannel.factory(opts, **kwargs)
@@ -75,7 +75,7 @@ class AsyncReqChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.AsyncReqChannel.factory(opts, **kwargs)
@@ -91,7 +91,7 @@ class AsyncPubChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.AsyncPubChannel.factory(opts, **kwargs)
@@ -110,7 +110,7 @@ class AsyncPushChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.AsyncPushChannel.factory(opts, **kwargs)
@@ -129,7 +129,7 @@ class AsyncPullChannel:
         import salt.channel.client
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.client instead.",
         )
         return salt.channel.client.AsyncPullChannel.factory(opts, **kwargs)

@@ -25,7 +25,7 @@ class ReqServerChannel:
         import salt.channel.server
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.server instead.",
         )
         return salt.channel.server.ReqServerChannel.factory(opts, **kwargs)
@@ -41,7 +41,7 @@ class PubServerChannel:
         import salt.channel.server
 
         warn_until(
-            "Argon",
+            3008,
             "This module is deprecated. Please use salt.channel.server instead.",
         )
         return salt.channel.server.PubServerChannel.factory(opts, **kwargs)

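Both transport modules are now pure shims: each factory warns and forwards to the `salt.channel` equivalent. Sketched generically with assumed names (and `warnings` standing in for `warn_until`), the pattern is:

```python
import warnings


class _RealChannel:
    """Stand-in for the salt.channel replacement class."""

    @classmethod
    def factory(cls, opts, **kwargs):
        return cls()


class DeprecatedChannel:
    """Old import path kept alive; construction is delegated."""

    @classmethod
    def factory(cls, opts, **kwargs):
        warnings.warn(
            "This module is deprecated. Please use salt.channel.client instead.",
            DeprecationWarning,
        )
        return _RealChannel.factory(opts, **kwargs)


channel = DeprecatedChannel.factory({})  # still works, but warns
```
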
@@ -10,8 +10,8 @@ import hashlib
 import logging
 import multiprocessing
 import os
-import pipes
 import re
+import shlex
 import shutil
 import socket
 import stat
@@ -199,7 +199,7 @@ def __ssh_gateway_arguments(kwargs):
             "-oUserKnownHostsFile=/dev/null",
             "-oControlPath=none",
             str(ssh_gateway_key),
-            "{}@{}".format(ssh_gateway_user, ssh_gateway),
+            f"{ssh_gateway_user}@{ssh_gateway}",
             "-p",
             str(ssh_gateway_port),
             str(ssh_gateway_command),
@@ -228,18 +228,18 @@ def os_script(os_, vm_=None, opts=None, minion=""):
         # The user provided an absolute path to the deploy script, let's use it
         return __render_script(os_, vm_, opts, minion)
 
-    if os.path.isabs("{}.sh".format(os_)):
+    if os.path.isabs(f"{os_}.sh"):
         # The user provided an absolute path to the deploy script, although no
         # extension was provided. Let's use it anyway.
-        return __render_script("{}.sh".format(os_), vm_, opts, minion)
+        return __render_script(f"{os_}.sh", vm_, opts, minion)
 
     for search_path in opts["deploy_scripts_search_path"]:
         if os.path.isfile(os.path.join(search_path, os_)):
             return __render_script(os.path.join(search_path, os_), vm_, opts, minion)
 
-        if os.path.isfile(os.path.join(search_path, "{}.sh".format(os_))):
+        if os.path.isfile(os.path.join(search_path, f"{os_}.sh")):
             return __render_script(
-                os.path.join(search_path, "{}.sh".format(os_)), vm_, opts, minion
+                os.path.join(search_path, f"{os_}.sh"), vm_, opts, minion
             )
     # No deploy script was found, return an empty string
     return ""
@@ -416,7 +416,7 @@ def bootstrap(vm_, opts=None):
     )
     if key_filename is not None and not os.path.isfile(key_filename):
         raise SaltCloudConfigError(
-            "The defined ssh_keyfile '{}' does not exist".format(key_filename)
+            f"The defined ssh_keyfile '{key_filename}' does not exist"
         )
     has_ssh_agent = False
     if (
@@ -782,8 +782,8 @@ def wait_for_port(
             # Don't add new hosts to the host key database
             "-oStrictHostKeyChecking=no",
             # make sure ssh can time out on connection lose
-            "-oServerAliveInterval={}".format(server_alive_interval),
-            "-oServerAliveCountMax={}".format(server_alive_count_max),
+            f"-oServerAliveInterval={server_alive_interval}",
+            f"-oServerAliveCountMax={server_alive_count_max}",
             # Set hosts key database path to /dev/null, i.e., non-existing
             "-oUserKnownHostsFile=/dev/null",
             # Don't re-use the SSH connection. Less failures.
@@ -808,21 +808,21 @@ def wait_for_port(
         ]
     )
     # Netcat command testing remote port
-    command = "nc -z -w5 -q0 {} {}".format(host, port)
+    command = f"nc -z -w5 -q0 {host} {port}"
     # SSH command
     pcmd = "ssh {} {}@{} -p {} {}".format(
         " ".join(ssh_args),
         gateway["ssh_gateway_user"],
         ssh_gateway,
         ssh_gateway_port,
-        pipes.quote("date"),
+        shlex.quote("date"),
     )
     cmd = "ssh {} {}@{} -p {} {}".format(
         " ".join(ssh_args),
         gateway["ssh_gateway_user"],
         ssh_gateway,
         ssh_gateway_port,
-        pipes.quote(command),
+        shlex.quote(command),
     )
     log.debug("SSH command: '%s'", cmd)
 
@@ -893,7 +893,7 @@ class Client:
         service_name=None,
     ):
         self.service_name = service_name
-        self._exe_file = "{}.exe".format(self.service_name)
+        self._exe_file = f"{self.service_name}.exe"
         self._client = PsExecClient(server, username, password, port, encrypt)
         self._client._service = ScmrService(self.service_name, self._client.session)
 
@@ -943,7 +943,7 @@ class Client:
         # delete the PAExec executable
         smb_tree = TreeConnect(
             self._client.session,
-            r"\\{}\ADMIN$".format(self._client.connection.server_name),
+            rf"\\{self._client.connection.server_name}\ADMIN$",
         )
         log.info("Connecting to SMB Tree %s", smb_tree.share_name)
         smb_tree.connect()
@@ -968,10 +968,10 @@ def run_winexe_command(cmd, args, host, username, password, port=445):
     """
     Run a command remotely via the winexe executable
     """
-    creds = "-U '{}%{}' //{}".format(username, password, host)
-    logging_creds = "-U '{}%XXX-REDACTED-XXX' //{}".format(username, host)
-    cmd = "winexe {} {} {}".format(creds, cmd, args)
-    logging_cmd = "winexe {} {} {}".format(logging_creds, cmd, args)
+    creds = f"-U '{username}%{password}' //{host}"
+    logging_creds = f"-U '{username}%XXX-REDACTED-XXX' //{host}"
+    cmd = f"winexe {creds} {cmd} {args}"
+    logging_cmd = f"winexe {logging_creds} {cmd} {args}"
     return win_cmd(cmd, logging_command=logging_cmd)
 
 
@@ -979,7 +979,7 @@ def run_psexec_command(cmd, args, host, username, password, port=445):
     """
     Run a command remotely using the psexec protocol
     """
-    service_name = "PS-Exec-{}".format(uuid.uuid4())
+    service_name = f"PS-Exec-{uuid.uuid4()}"
     with Client(
         host, username, password, port=port, encrypt=False, service_name=service_name
     ) as client:
@@ -1098,7 +1098,7 @@ def validate_windows_cred_winexe(
     """
     Check if the windows credentials are valid
    """
-    cmd = "winexe -U '{}%{}' //{} \"hostname\"".format(username, password, host)
+    cmd = f"winexe -U '{username}%{password}' //{host} \"hostname\""
     logging_cmd = "winexe -U '{}%XXX-REDACTED-XXX' //{} \"hostname\"".format(
         username, host
     )
@@ -1240,7 +1240,7 @@ def deploy_windows(
     winrm_port=5986,
     winrm_use_ssl=True,
    winrm_verify_ssl=True,
-    **kwargs
+    **kwargs,
 ):
     """
     Copy the install files to a remote Windows box, and execute them
@@ -1299,20 +1299,20 @@ def deploy_windows(
 
         salt.utils.smb.mkdirs("salttemp", conn=smb_conn)
         root_dir = "ProgramData/Salt Project/Salt"
-        salt.utils.smb.mkdirs("{}/conf/pki/minion".format(root_dir), conn=smb_conn)
+        salt.utils.smb.mkdirs(f"{root_dir}/conf/pki/minion", conn=smb_conn)
         root_dir = "ProgramData\\Salt Project\\Salt"
 
         if minion_pub:
             salt.utils.smb.put_str(
                 minion_pub,
-                "{}\\conf\\pki\\minion\\minion.pub".format(root_dir),
+                f"{root_dir}\\conf\\pki\\minion\\minion.pub",
                 conn=smb_conn,
             )
 
         if minion_pem:
             salt.utils.smb.put_str(
                 minion_pem,
-                "{}\\conf\\pki\\minion\\minion.pem".format(root_dir),
+                f"{root_dir}\\conf\\pki\\minion\\minion.pem",
                 conn=smb_conn,
            )
 
@@ -1324,7 +1324,7 @@ def deploy_windows(
             try:
                 salt.utils.smb.put_file(
                     master_sign_pub_file,
-                    "{}\\conf\\pki\\minion\\master_sign.pub".format(root_dir),
+                    f"{root_dir}\\conf\\pki\\minion\\master_sign.pub",
                     conn=smb_conn,
                 )
             except Exception as e:  # pylint: disable=broad-except
@@ -1342,16 +1342,16 @@ def deploy_windows(
             installer = comps[-1]
             salt.utils.smb.put_file(
                 win_installer,
-                "salttemp\\{}".format(installer),
+                f"salttemp\\{installer}",
                 "C$",
                 conn=smb_conn,
             )
 
-            cmd = "c:\\salttemp\\{}".format(installer)
+            cmd = f"c:\\salttemp\\{installer}"
             args = [
                 "/S",
-                "/master={}".format(_format_master_param(master)),
-                "/minion-name={}".format(name),
+                f"/master={_format_master_param(master)}",
+                f"/minion-name={name}",
             ]
 
             if use_winrm:
@@ -1362,7 +1362,7 @@ def deploy_windows(
                 )
 
             if ret_code != 0:
-                raise Exception("Fail installer {}".format(ret_code))
+                raise Exception(f"Fail installer {ret_code}")
 
             # Copy over minion_conf
             if minion_conf:
@@ -1378,7 +1378,7 @@ def deploy_windows(
                 if minion_grains:
                     salt.utils.smb.put_str(
                         salt_config_to_yaml(minion_grains, line_break="\r\n"),
-                        "{}\\conf\\grains".format(root_dir),
+                        f"{root_dir}\\conf\\grains",
                         conn=smb_conn,
                     )
                 # Add special windows minion configuration
@@ -1395,7 +1395,7 @@ def deploy_windows(
                 minion_conf = dict(minion_conf, **windows_minion_conf)
                 salt.utils.smb.put_str(
                     salt_config_to_yaml(minion_conf, line_break="\r\n"),
-                    "{}\\conf\\minion".format(root_dir),
+                    f"{root_dir}\\conf\\minion",
                     conn=smb_conn,
                 )
             # Delete C:\salttmp\ and installer file
@@ -1405,7 +1405,7 @@ def deploy_windows(
                 winrm_cmd(winrm_session, "rmdir", ["/Q", "/S", "C:\\salttemp\\"])
             else:
                 salt.utils.smb.delete_file(
-                    "salttemp\\{}".format(installer), "C$", conn=smb_conn
+                    f"salttemp\\{installer}", "C$", conn=smb_conn
                 )
                 salt.utils.smb.delete_directory("salttemp", "C$", conn=smb_conn)
             # Shell out to psexec to ensure salt-minion service started
@@ -1429,8 +1429,8 @@ def deploy_windows(
         # Fire deploy action
         fire_event(
             "event",
-            "{} has been deployed at {}".format(name, host),
-            "salt/cloud/{}/deploy_windows".format(name),
+            f"{name} has been deployed at {host}",
+            f"salt/cloud/{name}/deploy_windows",
             args={"name": name},
             sock_dir=opts.get("sock_dir", os.path.join(__opts__["sock_dir"], "master")),
             transport=opts.get("transport", "zeromq"),
@@ -1480,7 +1480,7 @@ def deploy_script(
     master_sign_pub_file=None,
     cloud_grains=None,
     force_minion_config=False,
-    **kwargs
+    **kwargs,
 ):
     """
     Copy a deploy script to a remote server, execute it, and remove it
@@ -1496,7 +1496,7 @@ def deploy_script(
     )
     if key_filename is not None and not os.path.isfile(key_filename):
         raise SaltCloudConfigError(
-            "The defined key_filename '{}' does not exist".format(key_filename)
+            f"The defined key_filename '{key_filename}' does not exist"
        )
 
     gateway = None
@@ -1543,35 +1543,28 @@ def deploy_script(
             ssh_kwargs["password"] = password
 
         if root_cmd(
-            "test -e '{}'".format(tmp_dir),
-            tty,
-            sudo,
-            allow_failure=True,
-            **ssh_kwargs
+            f"test -e '{tmp_dir}'", tty, sudo, allow_failure=True, **ssh_kwargs
         ):
             ret = root_cmd(
-                "sh -c \"( mkdir -p -m 700 '{}' )\"".format(tmp_dir),
+                f"sh -c \"( mkdir -p -m 700 '{tmp_dir}' )\"",
                 tty,
                 sudo,
-                **ssh_kwargs
+                **ssh_kwargs,
            )
            if ret:
                raise SaltCloudSystemExit(
-                    "Can't create temporary directory in {} !".format(tmp_dir)
+                    f"Can't create temporary directory in {tmp_dir} !"
                )
            if sudo:
                comps = tmp_dir.lstrip("/").rstrip("/").split("/")
                if comps:
                    if len(comps) > 1 or comps[0] != "tmp":
                        ret = root_cmd(
-                            'chown {} "{}"'.format(username, tmp_dir),
-                            tty,
-                            sudo,
-                            **ssh_kwargs
+                            f'chown {username} "{tmp_dir}"', tty, sudo, **ssh_kwargs
                        )
                        if ret:
                            raise SaltCloudSystemExit(
-                                "Cant set {} ownership on {}".format(username, tmp_dir)
+                                f"Cant set {username} ownership on {tmp_dir}"
                            )
 
         if not isinstance(file_map, dict):
@@ -1601,15 +1594,13 @@ def deploy_script(
                 remote_dir = os.path.dirname(remote_file)
 
                 if remote_dir not in remote_dirs:
-                    root_cmd(
-                        "mkdir -p '{}'".format(remote_dir), tty, sudo, **ssh_kwargs
-                    )
+                    root_cmd(f"mkdir -p '{remote_dir}'", tty, sudo, **ssh_kwargs)
                     if ssh_kwargs["username"] != "root":
                         root_cmd(
                             "chown {} '{}'".format(ssh_kwargs["username"], remote_dir),
                             tty,
                             sudo,
-                            **ssh_kwargs
+                            **ssh_kwargs,
                        )
                    remote_dirs.append(remote_dir)
                ssh_file(opts, remote_file, kwargs=ssh_kwargs, local_file=local_file)
@@ -1617,21 +1608,21 @@ def deploy_script(
 
         # Minion configuration
         if minion_pem:
-            ssh_file(opts, "{}/minion.pem".format(tmp_dir), minion_pem, ssh_kwargs)
+            ssh_file(opts, f"{tmp_dir}/minion.pem", minion_pem, ssh_kwargs)
             ret = root_cmd(
-                "chmod 600 '{}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
+                f"chmod 600 '{tmp_dir}/minion.pem'", tty, sudo, **ssh_kwargs
            )
            if ret:
                raise SaltCloudSystemExit(
-                    "Can't set perms on {}/minion.pem".format(tmp_dir)
+                    f"Can't set perms on {tmp_dir}/minion.pem"
                )
        if minion_pub:
-            ssh_file(opts, "{}/minion.pub".format(tmp_dir), minion_pub, ssh_kwargs)
+            ssh_file(opts, f"{tmp_dir}/minion.pub", minion_pub, ssh_kwargs)
 
        if master_sign_pub_file:
            ssh_file(
                opts,
-                "{}/master_sign.pub".format(tmp_dir),
+                f"{tmp_dir}/master_sign.pub",
                kwargs=ssh_kwargs,
                local_file=master_sign_pub_file,
            )
@@ -1649,7 +1640,7 @@ def deploy_script(
if minion_grains:
|
||||
ssh_file(
|
||||
opts,
|
||||
"{}/grains".format(tmp_dir),
|
||||
f"{tmp_dir}/grains",
|
||||
salt_config_to_yaml(minion_grains),
|
||||
ssh_kwargs,
|
||||
)
|
||||
|
@ -1657,24 +1648,22 @@ def deploy_script(
|
|||
minion_conf["grains"] = {"salt-cloud": cloud_grains}
|
||||
ssh_file(
|
||||
opts,
|
||||
"{}/minion".format(tmp_dir),
|
||||
f"{tmp_dir}/minion",
|
||||
salt_config_to_yaml(minion_conf),
|
||||
ssh_kwargs,
|
||||
)
|
||||
|
||||
# Master configuration
|
||||
if master_pem:
|
||||
ssh_file(opts, "{}/master.pem".format(tmp_dir), master_pem, ssh_kwargs)
|
||||
ssh_file(opts, f"{tmp_dir}/master.pem", master_pem, ssh_kwargs)
|
||||
ret = root_cmd(
|
||||
"chmod 600 '{}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
f"chmod 600 '{tmp_dir}/master.pem'", tty, sudo, **ssh_kwargs
|
||||
)
|
||||
if ret:
|
||||
raise SaltCloudSystemExit(
|
||||
"Cant set perms on {}/master.pem".format(tmp_dir)
|
||||
)
|
||||
raise SaltCloudSystemExit(f"Cant set perms on {tmp_dir}/master.pem")
|
||||
|
||||
if master_pub:
|
||||
ssh_file(opts, "{}/master.pub".format(tmp_dir), master_pub, ssh_kwargs)
|
||||
ssh_file(opts, f"{tmp_dir}/master.pub", master_pub, ssh_kwargs)
|
||||
|
||||
if master_conf:
|
||||
if not isinstance(master_conf, dict):
|
||||
|
@ -1688,34 +1677,31 @@ def deploy_script(
|
|||
|
||||
ssh_file(
|
||||
opts,
|
||||
"{}/master".format(tmp_dir),
|
||||
f"{tmp_dir}/master",
|
||||
salt_config_to_yaml(master_conf),
|
||||
ssh_kwargs,
|
||||
)
|
||||
|
||||
# XXX: We need to make these paths configurable
|
||||
preseed_minion_keys_tempdir = "{}/preseed-minion-keys".format(tmp_dir)
|
||||
preseed_minion_keys_tempdir = f"{tmp_dir}/preseed-minion-keys"
|
||||
if preseed_minion_keys is not None:
|
||||
# Create remote temp dir
|
||||
ret = root_cmd(
|
||||
"mkdir '{}'".format(preseed_minion_keys_tempdir),
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
f"mkdir '{preseed_minion_keys_tempdir}'", tty, sudo, **ssh_kwargs
|
||||
)
|
||||
if ret:
|
||||
raise SaltCloudSystemExit(
|
||||
"Cant create {}".format(preseed_minion_keys_tempdir)
|
||||
f"Cant create {preseed_minion_keys_tempdir}"
|
||||
)
|
||||
ret = root_cmd(
|
||||
"chmod 700 '{}'".format(preseed_minion_keys_tempdir),
|
||||
f"chmod 700 '{preseed_minion_keys_tempdir}'",
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
if ret:
|
||||
raise SaltCloudSystemExit(
|
||||
"Can't set perms on {}".format(preseed_minion_keys_tempdir)
|
||||
f"Can't set perms on {preseed_minion_keys_tempdir}"
|
||||
)
|
||||
if ssh_kwargs["username"] != "root":
|
||||
root_cmd(
|
||||
|
@ -1724,7 +1710,7 @@ def deploy_script(
|
|||
),
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
|
||||
# Copy pre-seed minion keys
|
||||
|
@ -1734,10 +1720,10 @@ def deploy_script(
|
|||
|
||||
if ssh_kwargs["username"] != "root":
|
||||
root_cmd(
|
||||
"chown -R root '{}'".format(preseed_minion_keys_tempdir),
|
||||
f"chown -R root '{preseed_minion_keys_tempdir}'",
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
if ret:
|
||||
raise SaltCloudSystemExit(
|
||||
|
@ -1751,25 +1737,21 @@ def deploy_script(
|
|||
for command in preflight_cmds:
|
||||
cmd_ret = root_cmd(command, tty, sudo, **ssh_kwargs)
|
||||
if cmd_ret:
|
||||
raise SaltCloudSystemExit(
|
||||
"Pre-flight command failed: '{}'".format(command)
|
||||
)
|
||||
raise SaltCloudSystemExit(f"Pre-flight command failed: '{command}'")
|
||||
|
||||
# The actual deploy script
|
||||
if script:
|
||||
# got strange escaping issues with sudoer, going onto a
|
||||
# subshell fixes that
|
||||
ssh_file(opts, "{}/deploy.sh".format(tmp_dir), script, ssh_kwargs)
|
||||
ssh_file(opts, f"{tmp_dir}/deploy.sh", script, ssh_kwargs)
|
||||
ret = root_cmd(
|
||||
"sh -c \"( chmod +x '{}/deploy.sh' )\";exit $?".format(tmp_dir),
|
||||
f"sh -c \"( chmod +x '{tmp_dir}/deploy.sh' )\";exit $?",
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
if ret:
|
||||
raise SaltCloudSystemExit(
|
||||
"Can't set perms on {}/deploy.sh".format(tmp_dir)
|
||||
)
|
||||
raise SaltCloudSystemExit(f"Can't set perms on {tmp_dir}/deploy.sh")
|
||||
|
||||
time_used = time.mktime(time.localtime()) - time.mktime(starttime)
|
||||
newtimeout = timeout - time_used
|
||||
|
@ -1785,7 +1767,7 @@ def deploy_script(
|
|||
kwargs=dict(
|
||||
name=name, sock_dir=sock_dir, timeout=newtimeout, queue=queue
|
||||
),
|
||||
name="DeployScriptCheckAuth({})".format(name),
|
||||
name=f"DeployScriptCheckAuth({name})",
|
||||
)
|
||||
log.debug("Starting new process to wait for salt-minion")
|
||||
process.start()
|
||||
|
@ -1793,7 +1775,7 @@ def deploy_script(
|
|||
# Run the deploy script
|
||||
if script:
|
||||
if "bootstrap-salt" in script:
|
||||
deploy_command += " -c '{}'".format(tmp_dir)
|
||||
deploy_command += f" -c '{tmp_dir}'"
|
||||
if force_minion_config:
|
||||
deploy_command += " -F"
|
||||
if make_syndic is True:
|
||||
|
@ -1805,9 +1787,9 @@ def deploy_script(
|
|||
if keep_tmp is True:
|
||||
deploy_command += " -K"
|
||||
if preseed_minion_keys is not None:
|
||||
deploy_command += " -k '{}'".format(preseed_minion_keys_tempdir)
|
||||
deploy_command += f" -k '{preseed_minion_keys_tempdir}'"
|
||||
if script_args:
|
||||
deploy_command += " {}".format(script_args)
|
||||
deploy_command += f" {script_args}"
|
||||
|
||||
if script_env:
|
||||
if not isinstance(script_env, dict):
|
||||
|
@ -1826,15 +1808,15 @@ def deploy_script(
|
|||
# Upload our environ setter wrapper
|
||||
ssh_file(
|
||||
opts,
|
||||
"{}/environ-deploy-wrapper.sh".format(tmp_dir),
|
||||
f"{tmp_dir}/environ-deploy-wrapper.sh",
|
||||
"\n".join(environ_script_contents),
|
||||
ssh_kwargs,
|
||||
)
|
||||
root_cmd(
|
||||
"chmod +x '{}/environ-deploy-wrapper.sh'".format(tmp_dir),
|
||||
f"chmod +x '{tmp_dir}/environ-deploy-wrapper.sh'",
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
# The deploy command is now our wrapper
|
||||
deploy_command = "'{}/environ-deploy-wrapper.sh'".format(
|
||||
|
@ -1842,22 +1824,20 @@ def deploy_script(
|
|||
)
|
||||
if root_cmd(deploy_command, tty, sudo, **ssh_kwargs) != 0:
|
||||
raise SaltCloudSystemExit(
|
||||
"Executing the command '{}' failed".format(deploy_command)
|
||||
f"Executing the command '{deploy_command}' failed"
|
||||
)
|
||||
log.debug("Executed command '%s'", deploy_command)
|
||||
|
||||
# Remove the deploy script
|
||||
if not keep_tmp:
|
||||
root_cmd(
|
||||
"rm -f '{}/deploy.sh'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/deploy.sh'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/deploy.sh", tmp_dir)
|
||||
if script_env:
|
||||
root_cmd(
|
||||
"rm -f '{}/environ-deploy-wrapper.sh'".format(tmp_dir),
|
||||
f"rm -f '{tmp_dir}/environ-deploy-wrapper.sh'",
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
log.debug("Removed %s/environ-deploy-wrapper.sh", tmp_dir)
|
||||
|
||||
|
@ -1866,57 +1846,40 @@ def deploy_script(
|
|||
else:
|
||||
# Remove minion configuration
|
||||
if minion_pub:
|
||||
root_cmd(
|
||||
"rm -f '{}/minion.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/minion.pub'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/minion.pub", tmp_dir)
|
||||
if minion_pem:
|
||||
root_cmd(
|
||||
"rm -f '{}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/minion.pem'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/minion.pem", tmp_dir)
|
||||
if minion_conf:
|
||||
root_cmd(
|
||||
"rm -f '{}/grains'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/grains'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/grains", tmp_dir)
|
||||
root_cmd(
|
||||
"rm -f '{}/minion'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/minion'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/minion", tmp_dir)
|
||||
if master_sign_pub_file:
|
||||
root_cmd(
|
||||
"rm -f {}/master_sign.pub".format(tmp_dir),
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
f"rm -f {tmp_dir}/master_sign.pub", tty, sudo, **ssh_kwargs
|
||||
)
|
||||
log.debug("Removed %s/master_sign.pub", tmp_dir)
|
||||
|
||||
# Remove master configuration
|
||||
if master_pub:
|
||||
root_cmd(
|
||||
"rm -f '{}/master.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/master.pub'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/master.pub", tmp_dir)
|
||||
if master_pem:
|
||||
root_cmd(
|
||||
"rm -f '{}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/master.pem'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/master.pem", tmp_dir)
|
||||
if master_conf:
|
||||
root_cmd(
|
||||
"rm -f '{}/master'".format(tmp_dir), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"rm -f '{tmp_dir}/master'", tty, sudo, **ssh_kwargs)
|
||||
log.debug("Removed %s/master", tmp_dir)
|
||||
|
||||
# Remove pre-seed keys directory
|
||||
if preseed_minion_keys is not None:
|
||||
root_cmd(
|
||||
"rm -rf '{}'".format(preseed_minion_keys_tempdir),
|
||||
f"rm -rf '{preseed_minion_keys_tempdir}'",
|
||||
tty,
|
||||
sudo,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
log.debug("Removed %s", preseed_minion_keys_tempdir)
|
||||
|
||||
|
@ -1931,15 +1894,13 @@ def deploy_script(
|
|||
# for line in output:
|
||||
# print(line)
|
||||
log.info("Executing %s on the salt-minion", start_action)
|
||||
root_cmd(
|
||||
"salt-call {}".format(start_action), tty, sudo, **ssh_kwargs
|
||||
)
|
||||
root_cmd(f"salt-call {start_action}", tty, sudo, **ssh_kwargs)
|
||||
log.info("Finished executing %s on the salt-minion", start_action)
|
||||
# Fire deploy action
|
||||
fire_event(
|
||||
"event",
|
||||
"{} has been deployed at {}".format(name, host),
|
||||
"salt/cloud/{}/deploy_script".format(name),
|
||||
f"{name} has been deployed at {host}",
|
||||
f"salt/cloud/{name}/deploy_script",
|
||||
args={"name": name, "host": host},
|
||||
sock_dir=opts.get(
|
||||
"sock_dir", os.path.join(__opts__["sock_dir"], "master")
|
||||
|
@ -1972,7 +1933,7 @@ def run_inline_script(
|
|||
tty=None,
|
||||
opts=None,
|
||||
tmp_dir="/tmp/.saltcloud-inline_script",
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Run the inline script commands, one by one
|
||||
|
@ -2029,11 +1990,11 @@ def run_inline_script(
|
|||
# TODO: check edge cases (e.g. ssh gateways, salt deploy disabled, etc.)
|
||||
if (
|
||||
root_cmd(
|
||||
'test -e \\"{}\\"'.format(tmp_dir),
|
||||
f'test -e \\"{tmp_dir}\\"',
|
||||
tty,
|
||||
sudo,
|
||||
allow_failure=True,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
and inline_script
|
||||
):
|
||||
|
@ -2041,11 +2002,11 @@ def run_inline_script(
|
|||
for cmd_line in inline_script:
|
||||
log.info("Executing inline command: %s", cmd_line)
|
||||
ret = root_cmd(
|
||||
'sh -c "( {} )"'.format(cmd_line),
|
||||
f'sh -c "( {cmd_line} )"',
|
||||
tty,
|
||||
sudo,
|
||||
allow_failure=True,
|
||||
**ssh_kwargs
|
||||
**ssh_kwargs,
|
||||
)
|
||||
if ret:
|
||||
log.info("[%s] Output: %s", cmd_line, ret)
|
||||
|
@ -2149,7 +2110,7 @@ def _exec_ssh_cmd(cmd, error_msg=None, allow_failure=False, **kwargs):
|
|||
time.sleep(0.5)
|
||||
if proc.exitstatus != 0 and allow_failure is False:
|
||||
raise SaltCloudSystemExit(
|
||||
"Command '{}' failed. Exit code: {}".format(cmd, proc.exitstatus)
|
||||
f"Command '{cmd}' failed. Exit code: {proc.exitstatus}"
|
||||
)
|
||||
return proc.exitstatus
|
||||
except salt.utils.vt.TerminalException as err:
|
||||
|
@ -2252,7 +2213,7 @@ def scp_file(dest_path, contents=None, kwargs=None, local_file=None):
|
|||
cmd,
|
||||
error_msg="Failed to upload file '{0}': {1}\n{2}",
|
||||
password_retries=3,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
)
|
||||
finally:
|
||||
if contents is not None:
|
||||
|
@ -2370,7 +2331,7 @@ def sftp_file(dest_path, contents=None, kwargs=None, local_file=None):
|
|||
cmd,
|
||||
error_msg="Failed to upload file '{0}': {1}\n{2}",
|
||||
password_retries=3,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
)
|
||||
finally:
|
||||
if contents is not None:
|
||||
|
@ -2430,11 +2391,11 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs):
|
|||
|
||||
if sudo:
|
||||
if sudo_password is None:
|
||||
command = "sudo {}".format(command)
|
||||
command = f"sudo {command}"
|
||||
logging_command = command
|
||||
else:
|
||||
logging_command = 'sudo -S "XXX-REDACTED-XXX" {}'.format(command)
|
||||
command = "sudo -S {}".format(command)
|
||||
logging_command = f'sudo -S "XXX-REDACTED-XXX" {command}'
|
||||
command = f"sudo -S {command}"
|
||||
|
||||
log.debug("Using sudo to run command %s", logging_command)
|
||||
|
||||
|
@ -2453,9 +2414,9 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs):
|
|||
ssh_args.extend(
|
||||
[
|
||||
# Don't add new hosts to the host key database
|
||||
"-oStrictHostKeyChecking={}".format(host_key_checking),
|
||||
f"-oStrictHostKeyChecking={host_key_checking}",
|
||||
# Set hosts key database path to /dev/null, i.e., non-existing
|
||||
"-oUserKnownHostsFile={}".format(known_hosts_file),
|
||||
f"-oUserKnownHostsFile={known_hosts_file}",
|
||||
# Don't re-use the SSH connection. Less failures.
|
||||
"-oControlPath=none",
|
||||
]
|
||||
|
@ -2488,12 +2449,12 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs):
|
|||
|
||||
cmd = "ssh {0} {1[username]}@{1[hostname]} ".format(" ".join(ssh_args), kwargs)
|
||||
logging_command = cmd + logging_command
|
||||
cmd = cmd + pipes.quote(command)
|
||||
cmd = cmd + shlex.quote(command)
|
||||
|
||||
hard_timeout = kwargs.get("hard_timeout")
|
||||
if hard_timeout is not None:
|
||||
logging_command = "timeout {} {}".format(hard_timeout, logging_command)
|
||||
cmd = "timeout {} {}".format(hard_timeout, cmd)
|
||||
logging_command = f"timeout {hard_timeout} {logging_command}"
|
||||
cmd = f"timeout {hard_timeout} {cmd}"
|
||||
|
||||
log.debug("SSH command: '%s'", logging_command)
|
||||
|
||||
|
@ -2515,7 +2476,7 @@ def check_auth(name, sock_dir=None, queue=None, timeout=300):
|
|||
ret = event.get_event(full=True)
|
||||
if ret is None:
|
||||
continue
|
||||
if ret["tag"] == "salt/minion/{}/start".format(name):
|
||||
if ret["tag"] == f"salt/minion/{name}/start":
|
||||
queue.put(name)
|
||||
newtimeout = 0
|
||||
log.debug("Minion %s is ready to receive commands", name)
|
||||
|
@ -2561,7 +2522,7 @@ def check_name(name, safe_chars):
|
|||
"""
|
||||
Check whether the specified name contains invalid characters
|
||||
"""
|
||||
regexp = re.compile("[^{}]".format(safe_chars))
|
||||
regexp = re.compile(f"[^{safe_chars}]")
|
||||
if regexp.search(name):
|
||||
raise SaltCloudException(
|
||||
"{} contains characters not supported by this cloud provider. "
|
||||
|
@ -2855,7 +2816,7 @@ def request_minion_cachedir(
|
|||
"provider": provider,
|
||||
}
|
||||
|
||||
fname = "{}.p".format(minion_id)
|
||||
fname = f"{minion_id}.p"
|
||||
path = os.path.join(base, "requested", fname)
|
||||
with salt.utils.files.fopen(path, "wb") as fh_:
|
||||
salt.utils.msgpack.dump(data, fh_, encoding=MSGPACK_ENCODING)
|
||||
|
@ -2886,7 +2847,7 @@ def change_minion_cachedir(
|
|||
if base is None:
|
||||
base = __opts__["cachedir"]
|
||||
|
||||
fname = "{}.p".format(minion_id)
|
||||
fname = f"{minion_id}.p"
|
||||
path = os.path.join(base, cachedir, fname)
|
||||
|
||||
with salt.utils.files.fopen(path, "r") as fh_:
|
||||
|
@ -2909,7 +2870,7 @@ def activate_minion_cachedir(minion_id, base=None):
|
|||
if base is None:
|
||||
base = __opts__["cachedir"]
|
||||
|
||||
fname = "{}.p".format(minion_id)
|
||||
fname = f"{minion_id}.p"
|
||||
src = os.path.join(base, "requested", fname)
|
||||
dst = os.path.join(base, "active")
|
||||
shutil.move(src, dst)
|
||||
|
@ -2931,7 +2892,7 @@ def delete_minion_cachedir(minion_id, provider, opts, base=None):
|
|||
base = __opts__["cachedir"]
|
||||
|
||||
driver = next(iter(__opts__["providers"][provider].keys()))
|
||||
fname = "{}.p".format(minion_id)
|
||||
fname = f"{minion_id}.p"
|
||||
for cachedir in "requested", "active":
|
||||
path = os.path.join(base, cachedir, driver, provider, fname)
|
||||
log.debug("path: %s", path)
|
||||
|
@ -3024,7 +2985,7 @@ def update_bootstrap(config, url=None):
|
|||
# in last case, assuming we got a script content
|
||||
else:
|
||||
script_content = url
|
||||
script_name = "{}.sh".format(hashlib.sha1(script_content).hexdigest())
|
||||
script_name = f"{hashlib.sha1(script_content).hexdigest()}.sh"
|
||||
|
||||
if not script_content:
|
||||
raise ValueError("No content in bootstrap script !")
|
||||
|
@ -3118,7 +3079,7 @@ def cache_node_list(nodes, provider, opts):
|
|||
|
||||
for node in nodes:
|
||||
diff_node_cache(prov_dir, node, nodes[node], opts)
|
||||
path = os.path.join(prov_dir, "{}.p".format(node))
|
||||
path = os.path.join(prov_dir, f"{node}.p")
|
||||
with salt.utils.files.fopen(path, "wb") as fh_:
|
||||
salt.utils.msgpack.dump(nodes[node], fh_, encoding=MSGPACK_ENCODING)
|
||||
|
||||
|
@ -3173,7 +3134,7 @@ def missing_node_cache(prov_dir, node_list, provider, opts):
|
|||
fire_event(
|
||||
"event",
|
||||
"cached node missing from provider",
|
||||
"salt/cloud/{}/cache_node_missing".format(node),
|
||||
f"salt/cloud/{node}/cache_node_missing",
|
||||
args={"missing node": node},
|
||||
sock_dir=opts.get(
|
||||
"sock_dir", os.path.join(__opts__["sock_dir"], "master")
|
||||
|
@ -3201,7 +3162,7 @@ def diff_node_cache(prov_dir, node, new_data, opts):
|
|||
|
||||
if node is None:
|
||||
return
|
||||
path = "{}.p".format(os.path.join(prov_dir, node))
|
||||
path = f"{os.path.join(prov_dir, node)}.p"
|
||||
|
||||
if not os.path.exists(path):
|
||||
event_data = _strip_cache_events(new_data, opts)
|
||||
|
@ -3209,7 +3170,7 @@ def diff_node_cache(prov_dir, node, new_data, opts):
|
|||
fire_event(
|
||||
"event",
|
||||
"new node found",
|
||||
"salt/cloud/{}/cache_node_new".format(node),
|
||||
f"salt/cloud/{node}/cache_node_new",
|
||||
args={"new_data": event_data},
|
||||
sock_dir=opts.get("sock_dir", os.path.join(__opts__["sock_dir"], "master")),
|
||||
transport=opts.get("transport", "zeromq"),
|
||||
|
@ -3233,7 +3194,7 @@ def diff_node_cache(prov_dir, node, new_data, opts):
|
|||
fire_event(
|
||||
"event",
|
||||
"node data differs",
|
||||
"salt/cloud/{}/cache_node_diff".format(node),
|
||||
f"salt/cloud/{node}/cache_node_diff",
|
||||
args={
|
||||
"new_data": _strip_cache_events(new_data, opts),
|
||||
"cache_data": _strip_cache_events(cache_data, opts),
|
||||
|
@ -3277,7 +3238,7 @@ def _salt_cloud_force_ascii(exc):
|
|||
errors.
|
||||
"""
|
||||
if not isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
|
||||
raise TypeError("Can't handle {}".format(exc))
|
||||
raise TypeError(f"Can't handle {exc}")
|
||||
|
||||
unicode_trans = {
|
||||
# Convert non-breaking space to space
|
||||
|
@ -3337,7 +3298,7 @@ def store_password_in_keyring(credential_id, username, password=None):
|
|||
|
||||
# pylint: enable=import-error
|
||||
if password is None:
|
||||
prompt = "Please enter password for {}: ".format(credential_id)
|
||||
prompt = f"Please enter password for {credential_id}: "
|
||||
try:
|
||||
password = getpass.getpass(prompt)
|
||||
except EOFError:
|
||||
|
|
|
@ -143,7 +143,7 @@ class EtcdBase:
|
|||
ca=None,
|
||||
client_key=None,
|
||||
client_cert=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
if not kwargs.get("has_etcd_opts", False):
|
||||
etcd_opts = _get_etcd_opts(opts, profile)
|
||||
|
@ -201,7 +201,7 @@ class EtcdBase:
|
|||
wait=False,
|
||||
timeout=None,
|
||||
start_revision=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Read a value of a key.
|
||||
|
@ -242,9 +242,9 @@ class EtcdBase:
|
|||
for k, v in data.items():
|
||||
k = k.strip("/")
|
||||
if path:
|
||||
p = "/{}/{}".format(path, k)
|
||||
p = f"/{path}/{k}"
|
||||
else:
|
||||
p = "/{}".format(k)
|
||||
p = f"/{k}"
|
||||
if isinstance(v, dict):
|
||||
ret = self._flatten(v, p)
|
||||
flat.update(ret)
|
||||
|
@ -357,7 +357,7 @@ class EtcdClient(EtcdBase):
|
|||
index = kwargs.pop("index", None)
|
||||
if index is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The index kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use start_revision instead.",
|
||||
)
|
||||
|
@ -441,20 +441,20 @@ class EtcdClient(EtcdBase):
|
|||
wait=False,
|
||||
timeout=None,
|
||||
start_revision=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
recursive = kwargs.pop("recursive", None)
|
||||
wait_index = kwargs.pop("waitIndex", None)
|
||||
if recursive is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The recursive kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use recurse instead.",
|
||||
)
|
||||
recurse = recursive
|
||||
if wait_index is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The waitIndex kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use start_revision instead.",
|
||||
)
|
||||
|
@ -487,7 +487,7 @@ class EtcdClient(EtcdBase):
|
|||
if wait:
|
||||
# Wait timeouts will throw ReadTimeoutError, which isn't bad
|
||||
log.debug("etcd: Timed out while executing a wait")
|
||||
raise EtcdUtilWatchTimeout("Watch on {} timed out".format(key))
|
||||
raise EtcdUtilWatchTimeout(f"Watch on {key} timed out")
|
||||
log.error("etcd: Timed out")
|
||||
raise etcd.EtcdConnectionFailed("Connection failed")
|
||||
except MaxRetryError as err:
|
||||
|
@ -601,7 +601,7 @@ class EtcdClient(EtcdBase):
|
|||
if item.dir is True:
|
||||
if item.key == path:
|
||||
continue
|
||||
dir_name = "{}/".format(item.key)
|
||||
dir_name = f"{item.key}/"
|
||||
ret[dir_name] = {}
|
||||
else:
|
||||
ret[item.key] = item.value
|
||||
|
@ -611,7 +611,7 @@ class EtcdClient(EtcdBase):
|
|||
recursive = kwargs.pop("recursive", None)
|
||||
if recursive is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The recursive kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use recurse instead.",
|
||||
)
|
||||
|
@ -677,7 +677,7 @@ class EtcdClientV3(EtcdBase):
|
|||
raw_keys=False,
|
||||
raw_values=False,
|
||||
unicode_errors=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
if not HAS_ETCD_V3:
|
||||
raise EtcdLibraryNotInstalled("Don't have etcd3-py, need to install it.")
|
||||
|
@ -740,7 +740,7 @@ class EtcdClientV3(EtcdBase):
|
|||
index = kwargs.pop("index", None)
|
||||
if index is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The index kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use start_revision instead.",
|
||||
)
|
||||
|
@ -780,20 +780,20 @@ class EtcdClientV3(EtcdBase):
|
|||
wait=False,
|
||||
timeout=None,
|
||||
start_revision=None,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
recursive = kwargs.pop("recursive", None)
|
||||
wait_index = kwargs.pop("waitIndex", None)
|
||||
if recursive is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The recursive kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use recurse instead.",
|
||||
)
|
||||
recurse = recursive
|
||||
if wait_index is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The waitIndex kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use start_revision instead.",
|
||||
)
|
||||
|
@ -890,7 +890,7 @@ class EtcdClientV3(EtcdBase):
|
|||
recursive = kwargs.pop("recursive", None)
|
||||
if recursive is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The recursive kwarg has been deprecated, and will be removed "
|
||||
"in the Argon release. Please use recurse instead.",
|
||||
)
|
||||
|
@ -983,7 +983,7 @@ def get_conn(opts, profile=None, **kwargs):
|
|||
use_v2 = conf.get("etcd.require_v2", True)
|
||||
if use_v2:
|
||||
salt.utils.versions.warn_until(
|
||||
"Potassium",
|
||||
3009,
|
||||
"etcd API v2 has been deprecated. It will be removed in "
|
||||
"the Potassium release, and etcd API v3 will be the default.",
|
||||
)
|
||||
|
|
|
@ -106,7 +106,6 @@ TAGS = {
|
|||
def get_event(
|
||||
node,
|
||||
sock_dir=None,
|
||||
transport=None,
|
||||
opts=None,
|
||||
listen=True,
|
||||
io_loop=None,
|
||||
|
@ -121,13 +120,6 @@ def get_event(
|
|||
set_event_handler() API. Otherwise, operation
|
||||
will be synchronous.
|
||||
"""
|
||||
if transport:
|
||||
salt.utils.versions.warn_until(
|
||||
"Chlorine",
|
||||
"The 'transport' kwarg has been deprecated and it will be removed "
|
||||
"in the Chlorine release, as such, its usage is no longer required.",
|
||||
)
|
||||
|
||||
sock_dir = sock_dir or opts["sock_dir"]
|
||||
# TODO: AIO core is separate from transport
|
||||
if node == "master":
|
||||
|
|
|
@ -80,7 +80,7 @@ def alias_function(fun, name, doc=None):
|
|||
alias_fun.__doc__ = doc
|
||||
else:
|
||||
orig_name = fun.__name__
|
||||
alias_msg = "\nThis function is an alias of ``{}``.\n".format(orig_name)
|
||||
alias_msg = f"\nThis function is an alias of ``{orig_name}``.\n"
|
||||
alias_fun.__doc__ = alias_msg + (fun.__doc__ or "")
|
||||
|
||||
return alias_fun
|
||||
|
|
|
@ -5,7 +5,7 @@ and the like, but also useful for basic HTTP testing.
|
|||
.. versionadded:: 2015.5.0
|
||||
"""
|
||||
|
||||
import cgi
|
||||
import email.message
|
||||
import gzip
|
||||
import http.client
|
||||
import http.cookiejar
|
||||
|
@ -85,7 +85,7 @@ except ImportError:
|
|||
HAS_CERTIFI = False
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
USERAGENT = "Salt/{}".format(salt.version.__version__)
|
||||
USERAGENT = f"Salt/{salt.version.__version__}"
|
||||
|
||||
|
||||
def __decompressContent(coding, pgctnt):
|
||||
|
@ -171,7 +171,7 @@ def query(
|
|||
formdata_fieldname=None,
|
||||
formdata_filename=None,
|
||||
decode_body=True,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Query a resource, and decode the return data
|
||||
|
@ -296,7 +296,7 @@ def query(
|
|||
auth = (username, password)
|
||||
|
||||
if agent == USERAGENT:
|
||||
agent = "{} http.query()".format(agent)
|
||||
agent = f"{agent} http.query()"
|
||||
header_dict["User-agent"] = agent
|
||||
|
||||
if backend == "requests":
|
||||
|
@ -361,14 +361,14 @@ def query(
|
|||
url,
|
||||
params=params,
|
||||
files={formdata_fieldname: (formdata_filename, io.StringIO(data))},
|
||||
**req_kwargs
|
||||
**req_kwargs,
|
||||
)
|
||||
else:
|
||||
result = sess.request(method, url, params=params, data=data, **req_kwargs)
|
||||
result.raise_for_status()
|
||||
if stream is True:
|
||||
# fake a HTTP response header
|
||||
header_callback("HTTP/1.0 {} MESSAGE".format(result.status_code))
|
||||
header_callback(f"HTTP/1.0 {result.status_code} MESSAGE")
|
||||
# fake streaming the content
|
||||
streaming_callback(result.content)
|
||||
return {
|
||||
|
@ -484,15 +484,12 @@ def query(
|
|||
result_headers = dict(result.info())
|
||||
result_text = result.read()
|
||||
if "Content-Type" in result_headers:
|
||||
res_content_type, res_params = cgi.parse_header(
|
||||
result_headers["Content-Type"]
|
||||
)
|
||||
if (
|
||||
res_content_type.startswith("text/")
|
||||
and "charset" in res_params
|
||||
and not isinstance(result_text, str)
|
||||
):
|
||||
result_text = result_text.decode(res_params["charset"])
|
||||
msg = email.message.EmailMessage()
|
||||
msg.add_header("Content-Type", result_headers["Content-Type"])
|
||||
if msg.get_content_type().startswith("text/"):
|
||||
content_charset = msg.get_content_charset()
|
||||
if content_charset and not isinstance(result_text, str):
|
||||
result_text = result_text.decode(content_charset)
|
||||
if isinstance(result_text, bytes) and decode_body:
|
||||
result_text = result_text.decode("utf-8")
|
||||
ret["body"] = result_text
|
||||
|
@ -637,15 +634,12 @@ def query(
|
|||
result_headers = result.headers
|
||||
result_text = result.body
|
||||
if "Content-Type" in result_headers:
|
||||
res_content_type, res_params = cgi.parse_header(
|
||||
result_headers["Content-Type"]
|
||||
)
|
||||
if (
|
||||
res_content_type.startswith("text/")
|
||||
and "charset" in res_params
|
||||
and not isinstance(result_text, str)
|
||||
):
|
||||
result_text = result_text.decode(res_params["charset"])
|
||||
msg = email.message.EmailMessage()
|
||||
msg.add_header("Content-Type", result_headers["Content-Type"])
|
||||
if msg.get_content_type().startswith("text/"):
|
||||
content_charset = msg.get_content_charset()
|
||||
if content_charset and not isinstance(result_text, str):
|
||||
result_text = result_text.decode(content_charset)
|
||||
if isinstance(result_text, bytes) and decode_body:
|
||||
result_text = result_text.decode("utf-8")
|
||||
ret["body"] = result_text
|
||||
|
@ -1039,12 +1033,12 @@ def _sanitize_url_components(comp_list, field):
|
|||
"""
|
||||
if not comp_list:
|
||||
return ""
|
||||
elif comp_list[0].startswith("{}=".format(field)):
|
||||
ret = "{}=XXXXXXXXXX&".format(field)
|
||||
elif comp_list[0].startswith(f"{field}="):
|
||||
ret = f"{field}=XXXXXXXXXX&"
|
||||
comp_list.remove(comp_list[0])
|
||||
return ret + _sanitize_url_components(comp_list, field)
|
||||
else:
|
||||
ret = "{}&".format(comp_list[0])
|
||||
ret = f"{comp_list[0]}&"
|
||||
comp_list.remove(comp_list[0])
|
||||
return ret + _sanitize_url_components(comp_list, field)
|
||||
|
||||
|
|
|
@ -2,13 +2,12 @@
|
|||
Jinja loading utils to enable a more powerful backend for jinja templates
|
||||
"""
|
||||
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
import os.path
|
||||
import pipes
|
||||
import pprint
|
||||
import re
|
||||
import shlex
|
||||
import time
|
||||
import uuid
|
||||
import warnings
|
||||
|
@ -242,11 +241,11 @@ class PrintableDict(OrderedDict):
|
|||
if isinstance(value, str):
|
||||
# keeps quotes around strings
|
||||
# pylint: disable=repr-flag-used-in-string
|
||||
output.append("{!r}: {!r}".format(key, value))
|
||||
output.append(f"{key!r}: {value!r}")
|
||||
# pylint: enable=repr-flag-used-in-string
|
||||
else:
|
||||
# let default output
|
||||
output.append("{!r}: {!s}".format(key, value))
|
||||
output.append(f"{key!r}: {value!s}")
|
||||
return "{" + ", ".join(output) + "}"
|
||||
|
||||
def __repr__(self): # pylint: disable=W0221
|
||||
|
@ -255,7 +254,7 @@ class PrintableDict(OrderedDict):
|
|||
# Raw string formatter required here because this is a repr
|
||||
# function.
|
||||
# pylint: disable=repr-flag-used-in-string
|
||||
output.append("{!r}: {!r}".format(key, value))
|
||||
output.append(f"{key!r}: {value!r}")
|
||||
# pylint: enable=repr-flag-used-in-string
|
||||
return "{" + ", ".join(output) + "}"
|
||||
|
||||
|
@ -441,7 +440,7 @@ def quote(txt):
|
|||
|
||||
'my_text'
|
||||
"""
|
||||
return pipes.quote(txt)
|
||||
return shlex.quote(txt)
|
||||
|
||||
|
||||
@jinja_filter()
|
||||
|
@ -1095,13 +1094,13 @@ class SerializerExtension(Extension):
|
|||
# to the stringified version of the exception.
|
||||
msg += str(exc)
|
||||
else:
|
||||
msg += "{}\n".format(problem)
|
||||
msg += f"{problem}\n"
|
||||
msg += salt.utils.stringutils.get_context(
|
||||
buf, line, marker=" <======================"
|
||||
)
|
||||
raise TemplateRuntimeError(msg)
|
||||
except AttributeError:
|
||||
raise TemplateRuntimeError("Unable to load yaml from {}".format(value))
|
||||
raise TemplateRuntimeError(f"Unable to load yaml from {value}")
|
||||
|
||||
def load_json(self, value):
|
||||
if isinstance(value, TemplateModule):
|
||||
|
@ -1109,7 +1108,7 @@ class SerializerExtension(Extension):
|
|||
try:
|
||||
return salt.utils.json.loads(value)
|
||||
except (ValueError, TypeError, AttributeError):
|
||||
raise TemplateRuntimeError("Unable to load json from {}".format(value))
|
||||
raise TemplateRuntimeError(f"Unable to load json from {value}")
|
||||
|
||||
def load_text(self, value):
|
||||
if isinstance(value, TemplateModule):
|
||||
|
@ -1144,7 +1143,7 @@ class SerializerExtension(Extension):
|
|||
return self._parse_profile_block(parser, label, "profile block", body, lineno)
|
||||
|
||||
def _create_profile_id(self, parser):
|
||||
return "_salt_profile_{}".format(parser.free_identifier().name)
|
||||
return f"_salt_profile_{parser.free_identifier().name}"
|
||||
|
||||
def _profile_start(self, label, source):
|
||||
return (label, source, time.time())
|
||||
|
@ -1186,7 +1185,7 @@ class SerializerExtension(Extension):
|
|||
filter_name = parser.stream.current.value
|
||||
lineno = next(parser.stream).lineno
|
||||
if filter_name not in self.environment.filters:
|
||||
parser.fail("Unable to parse {}".format(filter_name), lineno)
|
||||
parser.fail(f"Unable to parse {filter_name}", lineno)
|
||||
|
||||
parser.stream.expect("name:as")
|
||||
target = parser.parse_assign_target()
|
||||
|
@ -1225,7 +1224,7 @@ class SerializerExtension(Extension):
|
|||
nodes.Name(target, "store").set_lineno(lineno),
|
||||
nodes.Filter(
|
||||
nodes.Name(target, "load").set_lineno(lineno),
|
||||
"load_{}".format(converter),
|
||||
f"load_{converter}",
|
||||
[],
|
||||
[],
|
||||
None,
|
||||
|
@ -1234,7 +1233,7 @@ class SerializerExtension(Extension):
|
|||
).set_lineno(lineno),
|
||||
]
|
||||
return self._parse_profile_block(
|
||||
parser, import_node.template, "import_{}".format(converter), body, lineno
|
||||
parser, import_node.template, f"import_{converter}", body, lineno
|
||||
)
|
||||
|
||||
def dict_to_sls_yaml_params(self, value, flow_style=False):
|
||||
|
|
|
@ -41,7 +41,7 @@ def store_job(opts, load, event=None, mminion=None):
|
|||
nocache=load.get("nocache", False)
|
||||
)
|
||||
except KeyError:
|
||||
emsg = "Returner '{}' does not support function prep_jid".format(job_cache)
|
||||
emsg = f"Returner '{job_cache}' does not support function prep_jid"
|
||||
log.error(emsg)
|
||||
raise KeyError(emsg)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
|
@ -52,11 +52,11 @@ def store_job(opts, load, event=None, mminion=None):
|
|||
)
|
||||
|
||||
# save the load, since we don't have it
|
||||
saveload_fstr = "{}.save_load".format(job_cache)
|
||||
saveload_fstr = f"{job_cache}.save_load"
|
||||
try:
|
||||
mminion.returners[saveload_fstr](load["jid"], load)
|
||||
except KeyError:
|
||||
emsg = "Returner '{}' does not support function save_load".format(job_cache)
|
||||
emsg = f"Returner '{job_cache}' does not support function save_load"
|
||||
log.error(emsg)
|
||||
raise KeyError(emsg)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
|
@ -67,11 +67,11 @@ def store_job(opts, load, event=None, mminion=None):
|
|||
)
|
||||
elif salt.utils.jid.is_jid(load["jid"]):
|
||||
# Store the jid
|
||||
jidstore_fstr = "{}.prep_jid".format(job_cache)
|
||||
jidstore_fstr = f"{job_cache}.prep_jid"
|
||||
try:
|
||||
mminion.returners[jidstore_fstr](False, passed_jid=load["jid"])
|
||||
except KeyError:
|
||||
emsg = "Returner '{}' does not support function prep_jid".format(job_cache)
|
||||
emsg = f"Returner '{job_cache}' does not support function prep_jid"
|
||||
log.error(emsg)
|
||||
raise KeyError(emsg)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
|
@ -104,10 +104,10 @@ def store_job(opts, load, event=None, mminion=None):
|
|||
return
|
||||
|
||||
# otherwise, write to the master cache
|
||||
savefstr = "{}.save_load".format(job_cache)
|
||||
getfstr = "{}.get_load".format(job_cache)
|
||||
fstr = "{}.returner".format(job_cache)
|
||||
updateetfstr = "{}.update_endtime".format(job_cache)
|
||||
savefstr = f"{job_cache}.save_load"
|
||||
getfstr = f"{job_cache}.get_load"
|
||||
fstr = f"{job_cache}.returner"
|
||||
updateetfstr = f"{job_cache}.update_endtime"
|
||||
if "fun" not in load and load.get("return", {}):
|
||||
ret_ = load.get("return", {})
|
||||
if "fun" in ret_:
|
||||
|
@ -121,7 +121,7 @@ def store_job(opts, load, event=None, mminion=None):
|
|||
getfstr_func = mminion.returners[getfstr]
|
||||
fstr_func = mminion.returners[fstr]
|
||||
except KeyError as error:
|
||||
emsg = "Returner '{}' does not support function {}".format(job_cache, error)
|
||||
emsg = f"Returner '{job_cache}' does not support function {error}"
|
||||
log.error(emsg)
|
||||
raise KeyError(emsg)
|
||||
|
||||
|
@ -156,14 +156,12 @@ def store_minions(opts, jid, minions, mminion=None, syndic_id=None):
|
|||
if mminion is None:
|
||||
mminion = salt.minion.MasterMinion(opts, states=False, rend=False)
|
||||
job_cache = opts["master_job_cache"]
|
||||
minions_fstr = "{}.save_minions".format(job_cache)
|
||||
minions_fstr = f"{job_cache}.save_minions"
|
||||
|
||||
try:
|
||||
mminion.returners[minions_fstr](jid, minions, syndic_id=syndic_id)
|
||||
except KeyError:
|
||||
raise KeyError(
|
||||
"Returner '{}' does not support function save_minions".format(job_cache)
|
||||
)
|
||||
raise KeyError(f"Returner '{job_cache}' does not support function save_minions")
|
||||
|
||||
|
||||
def get_retcode(ret):
|
||||
|
@ -192,7 +190,7 @@ def get_keep_jobs_seconds(opts):
|
|||
keep_jobs = opts.get("keep_jobs", 24)
|
||||
if keep_jobs_seconds == 86400 and keep_jobs != 24:
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The 'keep_jobs' option has been deprecated and replaced with "
|
||||
"'keep_jobs_seconds'.",
|
||||
)
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
"""
|
||||
the locale utils used by salt
|
||||
"""
|
||||
|
||||
|
||||
import locale
|
||||
import sys
|
||||
|
||||
from salt.utils.decorators import memoize as real_memoize
|
||||
|
@ -83,3 +82,39 @@ def normalize_locale(loc):
|
|||
comps["codeset"] = comps["codeset"].lower().replace("-", "")
|
||||
comps["charmap"] = ""
|
||||
return join_locale(comps)
|
||||
|
||||
|
||||
def getdefaultlocale(envvars=("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")):
|
||||
"""
|
||||
This function was backported from Py3.11 which started triggering a
|
||||
deprecation warning about it's removal in 3.13.
|
||||
"""
|
||||
try:
|
||||
# check if it's supported by the _locale module
|
||||
import _locale
|
||||
|
||||
code, encoding = _locale._getdefaultlocale()
|
||||
except (ImportError, AttributeError):
|
||||
pass
|
||||
else:
|
||||
# make sure the code/encoding values are valid
|
||||
if sys.platform == "win32" and code and code[:2] == "0x":
|
||||
# map windows language identifier to language name
|
||||
code = locale.windows_locale.get(int(code, 0))
|
||||
# ...add other platform-specific processing here, if
|
||||
# necessary...
|
||||
return code, encoding
|
||||
|
||||
# fall back on POSIX behaviour
|
||||
import os
|
||||
|
||||
lookup = os.environ.get
|
||||
for variable in envvars:
|
||||
localename = lookup(variable, None)
|
||||
if localename:
|
||||
if variable == "LANGUAGE":
|
||||
localename = localename.split(":")[0]
|
||||
break
|
||||
else:
|
||||
localename = "C"
|
||||
return locale._parse_localename(localename)
|
||||
|
|
|
@ -94,7 +94,7 @@ class SaltNeutron(NeutronShell):
|
|||
Set up neutron credentials
|
||||
"""
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The neutron module has been deprecated and will be removed in {version}.\n"
|
||||
"This includes\n"
|
||||
"* salt.utils.openstack.neutron\n"
|
||||
|
|
|
@ -105,7 +105,7 @@ def _format_warning(message, category, filename, lineno, line=None):
|
|||
Replacement for warnings.formatwarning that disables the echoing of
|
||||
the 'line' parameter.
|
||||
"""
|
||||
return "{}:{}: {}: {}\n".format(filename, lineno, category.__name__, message)
|
||||
return f"{filename}:{lineno}: {category.__name__}: {message}\n"
|
||||
|
||||
|
||||
def warn_until(
|
||||
|
@ -317,7 +317,7 @@ def kwargs_warn_until(
|
|||
_version_ = salt.version.SaltStackVersion(*_version_info_)
|
||||
|
||||
if kwargs or _version_.info >= version.info:
|
||||
arg_names = ", ".join("'{}'".format(key) for key in kwargs)
|
||||
arg_names = ", ".join(f"'{key}'" for key in kwargs)
|
||||
warn_until(
|
||||
version,
|
||||
message=(
|
||||
|
@ -433,7 +433,7 @@ def check_boto_reqs(
|
|||
boto_ver = "2.0.0"
|
||||
|
||||
if not has_boto or version_cmp(boto.__version__, boto_ver) == -1:
|
||||
return False, "A minimum version of boto {} is required.".format(boto_ver)
|
||||
return False, f"A minimum version of boto {boto_ver} is required."
|
||||
|
||||
if check_boto3 is True:
|
||||
try:
|
||||
|
@ -455,12 +455,12 @@ def check_boto_reqs(
|
|||
if not has_boto3 or version_cmp(boto3.__version__, boto3_ver) == -1:
|
||||
return (
|
||||
False,
|
||||
"A minimum version of boto3 {} is required.".format(boto3_ver),
|
||||
f"A minimum version of boto3 {boto3_ver} is required.",
|
||||
)
|
||||
elif version_cmp(botocore.__version__, botocore_ver) == -1:
|
||||
return (
|
||||
False,
|
||||
"A minimum version of botocore {} is required".format(botocore_ver),
|
||||
f"A minimum version of botocore {botocore_ver} is required",
|
||||
)
|
||||
|
||||
return True
|
||||
|
|
|
@ -215,7 +215,7 @@ def ensure_cert_kwargs_compat(kwargs):
|
|||
for long_name in long_names:
|
||||
if long_name in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
"Potassium",
|
||||
3009,
|
||||
f"Found {long_name} in keyword args. Please migrate to the short name: {name}",
|
||||
)
|
||||
kwargs[name] = kwargs.pop(long_name)
|
||||
|
@ -224,7 +224,7 @@ def ensure_cert_kwargs_compat(kwargs):
|
|||
for long_name in long_names:
|
||||
if long_name in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
"Potassium",
|
||||
3009,
|
||||
f"Found {long_name} in keyword args. Please migrate to the short name: {extname}",
|
||||
)
|
||||
kwargs[extname] = kwargs.pop(long_name)
|
||||
|
@ -581,7 +581,7 @@ def merge_signing_policy(policy, kwargs):
|
|||
for long_name in long_names:
|
||||
if long_name in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
"Potassium",
|
||||
3009,
|
||||
f"Found {long_name} in keyword args. Please migrate to the short name: {name}",
|
||||
)
|
||||
kwargs[name] = kwargs.pop(long_name)
|
||||
|
@ -591,7 +591,7 @@ def merge_signing_policy(policy, kwargs):
|
|||
for long_name in long_names:
|
||||
if long_name in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
"Potassium",
|
||||
3009,
|
||||
f"Found {long_name} in keyword args. Please migrate to the short name: {extname}",
|
||||
)
|
||||
kwargs[extname] = kwargs.pop(long_name)
|
||||
|
|
|
@ -51,7 +51,7 @@ def ip_bracket(addr):
|
|||
"This function has been moved to salt.utils.network.ip_bracket"
|
||||
|
||||
salt.utils.versions.warn_until(
|
||||
"Argon",
|
||||
3008,
|
||||
"The 'utils.zeromq.ip_bracket' has been moved to 'utils.network.ip_bracket'. "
|
||||
"Please use 'utils.network.ip_bracket' because 'utils.zeromq.ip_bracket' "
|
||||
"will be removed in future releases.",
|
||||
|
|
|
@ -24,6 +24,20 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
|
|||
user_ssh_dir = os.path.join(userdetails["home"], ".ssh")
|
||||
authorized_keys_file = os.path.join(user_ssh_dir, "authorized_keys")
|
||||
|
||||
key1 = (
|
||||
# Explicit no ending line break
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC3dd5ACsvJhnIOrn6bSOkX5"
|
||||
"KyVDpTYsVAaJj3AmEo6Fr5cHXJFJoJS+Ld8K5vCscPzuXashdYUdrhL1E5Liz"
|
||||
"bza+zneQ5AkJ7sn2NXymD6Bbra+infO4NgnQXbGMp/NyY65jbQGqJeQ081iEV"
|
||||
f"YbDP2zXp6fmrqqmFCaakZfGRbVw== root"
|
||||
)
|
||||
key2 = (
|
||||
"AAAAB3NzaC1yc2EAAAADAQABAAAAgQC7h77HyBPCUDONCs5bI/PrrPwyYJegl0"
|
||||
"f9YWLaBofVYOUl/uSv1ux8zjIoLVs4kguY1ihtIoK2kho4YsjNtIaAd6twdua9"
|
||||
"oqCg2g/54cIK/8WbIjwnb3LFRgyTG5DFuj+7526EdJycAZvhSzIZYui3RUj4Vp"
|
||||
"eMoF7mcB6TIK2/2w=="
|
||||
)
|
||||
|
||||
ret = self.run_state(
|
||||
"file.managed",
|
||||
name=authorized_keys_file,
|
||||
|
@ -31,23 +45,22 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
|
|||
makedirs=True,
|
||||
contents_newline=False,
|
||||
# Explicit no ending line break
|
||||
contents="ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root",
|
||||
contents=key1,
|
||||
)
|
||||
|
||||
ret = self.run_state(
|
||||
"ssh_auth.present",
|
||||
name="AAAAB3NzaC1kcQ9J5bYTEyZ==",
|
||||
name=key2,
|
||||
enc="ssh-rsa",
|
||||
user=username,
|
||||
comment=username,
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertSaltStateChangesEqual(ret, {"AAAAB3NzaC1kcQ9J5bYTEyZ==": "New"})
|
||||
self.assertSaltStateChangesEqual(ret, {key2: "New"})
|
||||
with salt.utils.files.fopen(authorized_keys_file, "r") as fhr:
|
||||
self.assertEqual(
|
||||
fhr.read(),
|
||||
"ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root\n"
|
||||
"ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username),
|
||||
f"{key1}\nssh-rsa {key2} {username}\n",
|
||||
)
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
|
@ -60,39 +73,48 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
|
|||
authorized_keys_file = os.path.join(user_ssh_dir, "authorized_keys")
|
||||
|
||||
key_fname = "issue_10198.id_rsa.pub"
|
||||
key_contents = (
|
||||
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC3dd5ACsvJhnIOrn6bSOkX5"
|
||||
"KyVDpTYsVAaJj3AmEo6Fr5cHXJFJoJS+Ld8K5vCscPzuXashdYUdrhL1E5Liz"
|
||||
"bza+zneQ5AkJ7sn2NXymD6Bbra+infO4NgnQXbGMp/NyY65jbQGqJeQ081iEV"
|
||||
f"YbDP2zXp6fmrqqmFCaakZfGRbVw== {username}\n"
|
||||
)
|
||||
|
||||
# Create the keyfile that we expect to get back on the state call
|
||||
with salt.utils.files.fopen(
|
||||
os.path.join(RUNTIME_VARS.TMP_PRODENV_STATE_TREE, key_fname), "w"
|
||||
) as kfh:
|
||||
kfh.write("ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username))
|
||||
kfh.write(key_contents)
|
||||
|
||||
# Create a bogus key file on base environment
|
||||
with salt.utils.files.fopen(
|
||||
os.path.join(RUNTIME_VARS.TMP_STATE_TREE, key_fname), "w"
|
||||
) as kfh:
|
||||
kfh.write("ssh-rsa BAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username))
|
||||
kfh.write(
|
||||
"ssh-rsa A!AAB3NzaC1yc2EAAAADAQABAAAAgQC3dd5ACsvJhnIOrn6bSOkX5"
|
||||
"KyVDpTYsVAaJj3AmEo6Fr5cHXJFJoJS+Ld8K5vCscPzuXashdYUdrhL1E5Liz"
|
||||
"bza+zneQ5AkJ7sn2NXymD6Bbra+infO4NgnQXbGMp/NyY65jbQGqJeQ081iEV"
|
||||
f"YbDP2zXp6fmrqqmFCaakZfGRbVw== {username}\n"
|
||||
)
|
||||
|
||||
ret = self.run_state(
|
||||
"ssh_auth.present",
|
||||
name="Setup Keys",
|
||||
source="salt://{}?saltenv=prod".format(key_fname),
|
||||
source=f"salt://{key_fname}?saltenv=prod",
|
||||
enc="ssh-rsa",
|
||||
user=username,
|
||||
comment=username,
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
with salt.utils.files.fopen(authorized_keys_file, "r") as fhr:
|
||||
self.assertEqual(
|
||||
fhr.read(), "ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username)
|
||||
)
|
||||
self.assertEqual(fhr.read(), key_contents)
|
||||
|
||||
os.unlink(authorized_keys_file)
|
||||
|
||||
ret = self.run_state(
|
||||
"ssh_auth.present",
|
||||
name="Setup Keys",
|
||||
source="salt://{}".format(key_fname),
|
||||
source=f"salt://{key_fname}",
|
||||
enc="ssh-rsa",
|
||||
user=username,
|
||||
comment=username,
|
||||
|
@ -100,6 +122,4 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
|
|||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
with salt.utils.files.fopen(authorized_keys_file, "r") as fhr:
|
||||
self.assertEqual(
|
||||
fhr.read(), "ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username)
|
||||
)
|
||||
self.assertEqual(fhr.read(), key_contents)
|
||||
|
|
|
@ -139,7 +139,7 @@ def test_file_managed_web_source_etag_operation(
|
|||
minion_opts["cachedir"],
|
||||
"extrn_files",
|
||||
"base",
|
||||
"localhost:{free_port}".format(free_port=free_port),
|
||||
f"localhost:{free_port}",
|
||||
"foo.txt",
|
||||
)
|
||||
cached_etag = cached_file + ".etag"
|
||||
|
@ -151,7 +151,7 @@ def test_file_managed_web_source_etag_operation(
|
|||
# 127.0.0.1 - - [08/Jan/2022 00:53:11] "GET /foo.txt HTTP/1.1" 200 -
|
||||
states.file.managed(
|
||||
name=os.path.join(web_root, "bar.txt"),
|
||||
source="http://localhost:{free_port}/foo.txt".format(free_port=free_port),
|
||||
source=f"http://localhost:{free_port}/foo.txt",
|
||||
use_etag=True,
|
||||
)
|
||||
|
||||
|
@ -167,7 +167,7 @@ def test_file_managed_web_source_etag_operation(
|
|||
# 127.0.0.1 - - [08/Jan/2022 00:53:11] "GET /foo.txt HTTP/1.1" 304 -
|
||||
states.file.managed(
|
||||
name=os.path.join(web_root, "bar.txt"),
|
||||
source="http://localhost:{free_port}/foo.txt".format(free_port=free_port),
|
||||
source=f"http://localhost:{free_port}/foo.txt",
|
||||
use_etag=True,
|
||||
)
|
||||
|
||||
|
@ -183,7 +183,7 @@ def test_file_managed_web_source_etag_operation(
|
|||
# No call to the web server will be made.
|
||||
states.file.managed(
|
||||
name=os.path.join(web_root, "bar.txt"),
|
||||
source="http://localhost:{free_port}/foo.txt".format(free_port=free_port),
|
||||
source=f"http://localhost:{free_port}/foo.txt",
|
||||
use_etag=False,
|
||||
)
|
||||
|
||||
|
@ -195,9 +195,31 @@ def test_file_managed_web_source_etag_operation(
|
|||
# 127.0.0.1 - - [08/Jan/2022 00:53:12] "GET /foo.txt HTTP/1.1" 200 -
|
||||
states.file.managed(
|
||||
name=os.path.join(web_root, "bar.txt"),
|
||||
source="http://localhost:{free_port}/foo.txt".format(free_port=free_port),
|
||||
source=f"http://localhost:{free_port}/foo.txt",
|
||||
use_etag=True,
|
||||
)
|
||||
|
||||
# The modified time of the cached file now changes
|
||||
assert cached_file_mtime != os.path.getmtime(cached_file)
|
||||
|
||||
|
||||
def test_file_symlink_replace_existing_link(states, tmp_path):
|
||||
# symlink name and target for state
|
||||
name = tmp_path / "foo"
|
||||
target = tmp_path / "baz"
|
||||
|
||||
# create existing symlink to replace
|
||||
old_target = tmp_path / "bar"
|
||||
name.symlink_to(old_target)
|
||||
|
||||
ret = states.file.symlink(
|
||||
name=str(name),
|
||||
target=str(target),
|
||||
)
|
||||
|
||||
assert ret.filtered == {
|
||||
"name": str(name),
|
||||
"changes": {"new": str(name)},
|
||||
"comment": f"Created new symlink {str(name)} -> {str(target)}",
|
||||
"result": True,
|
||||
}
|
||||
|
|
|
@ -75,10 +75,7 @@ def PKG_32_TARGETS(grains):
|
|||
_PKG_32_TARGETS = []
|
||||
if grains["os_family"] == "RedHat":
|
||||
if grains["os"] == "CentOS":
|
||||
if grains["osmajorrelease"] == 5:
|
||||
_PKG_32_TARGETS = ["xz-devel.i386"]
|
||||
else:
|
||||
_PKG_32_TARGETS.append("xz-devel.i686")
|
||||
_PKG_32_TARGETS.append("xz-devel.i686")
|
||||
if not _PKG_32_TARGETS:
|
||||
pytest.skip("No 32 bit packages have been specified for testing")
|
||||
return _PKG_32_TARGETS
|
||||
|
@ -88,11 +85,7 @@ def PKG_32_TARGETS(grains):
|
|||
def PKG_DOT_TARGETS(grains):
|
||||
_PKG_DOT_TARGETS = []
|
||||
if grains["os_family"] == "RedHat":
|
||||
if grains["osmajorrelease"] == 5:
|
||||
_PKG_DOT_TARGETS = ["python-migrate0.5"]
|
||||
elif grains["osmajorrelease"] == 6:
|
||||
_PKG_DOT_TARGETS = ["tomcat6-el-2.1-api"]
|
||||
elif grains["osmajorrelease"] == 7:
|
||||
if grains["osmajorrelease"] == 7:
|
||||
_PKG_DOT_TARGETS = ["tomcat-el-2.2-api"]
|
||||
elif grains["osmajorrelease"] == 8:
|
||||
_PKG_DOT_TARGETS = ["aspnetcore-runtime-6.0"]
|
||||
|
@ -455,9 +448,10 @@ def test_pkg_011_latest_only_upgrade(
|
|||
new_version = modules.pkg.version(target, use_context=False)
|
||||
assert new_version == updates[target]
|
||||
ret = states.pkg.latest(name=target, refresh=False, only_upgrade=True)
|
||||
assert ret.raw["pkg_|-{0}_|-{0}_|-latest".format(target)][
|
||||
"comment"
|
||||
] == "Package {} is already up-to-date".format(target)
|
||||
assert (
|
||||
ret.raw["pkg_|-{0}_|-{0}_|-latest".format(target)]["comment"]
|
||||
== f"Package {target} is already up-to-date"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("WILDCARDS_SUPPORTED")
|
||||
|
@ -605,7 +599,7 @@ def test_pkg_015_installed_held(grains, modules, states, PKG_TARGETS):
|
|||
except AssertionError as exc:
|
||||
log.debug("Versionlock package not found:\n%s", exc)
|
||||
else:
|
||||
pytest.fail("Could not install versionlock package from {}".format(pkgs))
|
||||
pytest.fail(f"Could not install versionlock package from {pkgs}")
|
||||
|
||||
target = PKG_TARGETS[0]
|
||||
|
||||
|
@ -624,7 +618,7 @@ def test_pkg_015_installed_held(grains, modules, states, PKG_TARGETS):
|
|||
)
|
||||
|
||||
if versionlock_pkg and "-versionlock is not installed" in str(ret):
|
||||
pytest.skip("{} `{}` is installed".format(ret, versionlock_pkg))
|
||||
pytest.skip(f"{ret} `{versionlock_pkg}` is installed")
|
||||
|
||||
# changes from pkg.hold for Red Hat family are different
|
||||
target_changes = {}
|
||||
|
@ -724,7 +718,7 @@ def test_pkg_017_installed_held_equals_false(grains, modules, states, PKG_TARGET
|
|||
except AssertionError as exc:
|
||||
log.debug("Versionlock package not found:\n%s", exc)
|
||||
else:
|
||||
pytest.fail("Could not install versionlock package from {}".format(pkgs))
|
||||
pytest.fail(f"Could not install versionlock package from {pkgs}")
|
||||
|
||||
target = PKG_TARGETS[0]
|
||||
|
||||
|
@@ -737,7 +731,7 @@ def test_pkg_017_installed_held_equals_false(grains, modules, states, PKG_TARGET
    assert target_ret.result is True

    if versionlock_pkg and "-versionlock is not installed" in str(target_ret):
        pytest.skip("{} `{}` is installed".format(target_ret, versionlock_pkg))
        pytest.skip(f"{target_ret} `{versionlock_pkg}` is installed")

    try:
        tag = "pkg_|-{0}_|-{0}_|-installed".format(target)
@@ -789,7 +783,7 @@ def test_pkg_cap_001_installed(PKG_CAP_TARGETS, modules, states):
        test=True,
    )
    assert (
        "The following packages would be installed/updated: {}".format(realpkg)
        f"The following packages would be installed/updated: {realpkg}"
        in ret.comment
    )
    ret = states.pkg.installed(
@@ -887,7 +881,7 @@ def test_pkg_cap_003_installed_multipkg_with_version(
        test=True,
    )
    assert "packages would be installed/updated" in ret.comment
    assert "{}={}".format(realpkg, realver) in ret.comment
    assert f"{realpkg}={realver}" in ret.comment

    ret = states.pkg.installed(
        name="test_pkg_cap_003_installed_multipkg_with_version-install-capability",
@@ -931,7 +925,7 @@ def test_pkg_cap_004_latest(PKG_CAP_TARGETS, modules, states):
        test=True,
    )
    assert (
        "The following packages would be installed/upgraded: {}".format(realpkg)
        f"The following packages would be installed/upgraded: {realpkg}"
        in ret.comment
    )
    ret = states.pkg.latest(name=target, refresh=False, resolve_capabilities=True)
@@ -971,9 +965,7 @@ def test_pkg_cap_005_downloaded(PKG_CAP_TARGETS, modules, states):
        resolve_capabilities=True,
        test=True,
    )
    assert (
        "The following packages would be downloaded: {}".format(realpkg) in ret.comment
    )
    assert f"The following packages would be downloaded: {realpkg}" in ret.comment

    ret = states.pkg.downloaded(name=target, refresh=False, resolve_capabilities=True)
    assert ret.result is True

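Most hunks in this commit follow the same mechanical pattern: `str.format()` calls rewritten as f-strings (PEP 498, available since Python 3.6), presumably via an automated rewriter such as pyupgrade. The two spellings produce identical strings:

    target = "xz-devel.i686"
    # Old-style formatting and the f-string are equivalent:
    assert "Package {} is already up-to-date".format(target) == (
        f"Package {target} is already up-to-date"
    )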
@@ -185,14 +185,6 @@ def test_adduser():
    Tests if specified user gets added in the group.
    """
    os_version_list = [
        {
            "grains": {
                "kernel": "Linux",
                "os_family": "RedHat",
                "osmajorrelease": "5",
            },
            "cmd": ["/bin/gpasswd", "-a", "root", "test"],
        },
        {
            "grains": {
                "kernel": "Linux",
@@ -214,18 +206,12 @@ def test_adduser():
        "salt.utils.path.which",
        MagicMock(
            side_effect=[
                None,
                "/bin/gpasswd",
                "/bin/usermod",
                "/bin/gpasswd",
                "/bin/usermod",
            ]
        ),
    ):
        with patch.dict(groupadd.__grains__, os_version_list[0]["grains"]):
            with pytest.raises(CommandExecutionError):
                groupadd.adduser("test", "root")

    for os_version in os_version_list:
        mock = MagicMock(return_value={"retcode": 0})
        with patch.dict(groupadd.__grains__, os_version["grains"]), patch.dict(
@@ -242,14 +228,6 @@ def test_deluser():
    Tests if specified user gets deleted from the group.
    """
    os_version_list = [
        {
            "grains": {
                "kernel": "Linux",
                "os_family": "RedHat",
                "osmajorrelease": "5",
            },
            "cmd": ["/bin/gpasswd", "-d", "root", "test"],
        },
        {
            "grains": {
                "kernel": "Linux",
@@ -269,30 +247,12 @@ def test_deluser():
        "salt.utils.path.which",
        MagicMock(
            side_effect=[
                None,
                "/bin/gpasswd",
                "/bin/usermod",
                "/bin/gpasswd",
                "/bin/usermod",
            ]
        ),
    ):
        with patch.dict(groupadd.__grains__, os_version_list[0]["grains"]), patch.dict(
            groupadd.__salt__,
            {
                "group.info": MagicMock(
                    return_value={
                        "passwd": "*",
                        "gid": 0,
                        "name": "test",
                        "members": ["root"],
                    }
                ),
            },
        ):
            with pytest.raises(CommandExecutionError):
                groupadd.deluser("test", "root")

    for os_version in os_version_list:
        mock_retcode = MagicMock(return_value=0)
        mock_stdout = MagicMock(return_value="test foo")
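These tests lean on `unittest.mock`'s `side_effect` list: each successive call to the patched `salt.utils.path.which` returns the next value, so the leading `None` simulates a missing binary (which is what makes the code raise `CommandExecutionError`) while later lookups succeed. A self-contained sketch of that behavior:

    from unittest.mock import MagicMock

    which = MagicMock(side_effect=[None, "/bin/gpasswd"])
    print(which("gpasswd"))  # None -> binary "missing" on the first lookup
    print(which("gpasswd"))  # /bin/gpasswd -> found on the next call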
@@ -325,14 +285,6 @@ def test_members():
    Tests if members of the group, get replaced with a provided list.
    """
    os_version_list = [
        {
            "grains": {
                "kernel": "Linux",
                "os_family": "RedHat",
                "osmajorrelease": "5",
            },
            "cmd": ["/bin/gpasswd", "-M", "foo", "test"],
        },
        {
            "grains": {
                "kernel": "Linux",
@@ -352,8 +304,6 @@ def test_members():
        "salt.utils.path.which",
        MagicMock(
            side_effect=[
                None,
                "/bin/gpasswd",
                "/bin/gpasswd",
                "/bin/groupmod",
                "/bin/gpasswd",
@@ -363,10 +313,6 @@ def test_members():
            ]
        ),
    ):
        with patch.dict(groupadd.__grains__, os_version_list[0]["grains"]):
            with pytest.raises(CommandExecutionError):
                groupadd.members("test", "foo")

    for os_version in os_version_list:
        mock_ret = MagicMock(return_value={"retcode": 0})
        mock_stdout = MagicMock(return_value={"cmd.run_stdout": 1})

@@ -4,7 +4,7 @@


import logging
from inspect import ArgSpec
from inspect import FullArgSpec

import salt.states.module as module
from tests.support.mixins import LoaderModuleMockMixin
@@ -117,11 +117,25 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):

    @classmethod
    def setUpClass(cls):
        cls.aspec = ArgSpec(
            args=["hello", "world"], varargs=None, keywords=None, defaults=False
        cls.aspec = FullArgSpec(
            args=["hello", "world"],
            varargs=None,
            varkw=None,
            defaults=False,
            kwonlyargs=None,
            kwonlydefaults=None,
            annotations=None,
        )

        cls.bspec = ArgSpec(args=[], varargs="names", keywords="kwargs", defaults=None)
        cls.bspec = FullArgSpec(
            args=[],
            varargs="names",
            varkw=None,
            defaults=None,
            kwonlyargs="kwargs",
            kwonlydefaults=None,
            annotations=None,
        )

    @classmethod
    def tearDownClass(cls):
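Background on this hunk: `inspect.ArgSpec` (and `inspect.getargspec()`) were removed in Python 3.11, so the fixtures are rebuilt on `inspect.FullArgSpec`, which renames `keywords` to `varkw` and adds `kwonlyargs`, `kwonlydefaults`, and `annotations`. A quick sketch with an illustrative function (not from the commit):

    import inspect

    def greet(hello, world, *names, **kwargs):
        return hello

    spec = inspect.getfullargspec(greet)  # replaces the removed getargspec()
    print(spec.varkw)       # 'kwargs' -- was ArgSpec.keywords before the rename
    print(spec.kwonlyargs)  # [] -- new field; greet has no keyword-only args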
@@ -137,8 +151,8 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
            module.__opts__, {"use_superseded": ["module.run"]}
        ):
            ret = module.run(**{CMD: None})
            if ret["comment"] != "Unavailable function: {}.".format(CMD) or ret["result"]:
                self.fail("module.run did not fail as expected: {}".format(ret))
            if ret["comment"] != f"Unavailable function: {CMD}." or ret["result"]:
                self.fail(f"module.run did not fail as expected: {ret}")

    def test_run_module_not_available_testmode(self):
        """
@@ -151,10 +165,10 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
        ):
            ret = module.run(**{CMD: None})
            if (
                ret["comment"] != "Unavailable function: {}.".format(CMD)
                ret["comment"] != f"Unavailable function: {CMD}."
                or ret["result"] is not False
            ):
                self.fail("module.run did not fail as expected: {}".format(ret))
                self.fail(f"module.run did not fail as expected: {ret}")

    def test_run_module_noop(self):
        """
@@ -166,7 +180,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
        ):
            ret = module.run()
            if ret["comment"] != "No function provided." or ret["result"] is not False:
                self.fail("module.run did not fail as expected: {}".format(ret))
                self.fail(f"module.run did not fail as expected: {ret}")

    def test_module_run_hidden_varargs(self):
        """
@@ -189,10 +203,10 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
        ):
            ret = module.run(**{CMD: None})
            if (
                ret["comment"] != "Function {} to be executed.".format(CMD)
                ret["comment"] != f"Function {CMD} to be executed."
                or ret["result"] is not None
            ):
                self.fail("module.run failed: {}".format(ret))
                self.fail(f"module.run failed: {ret}")

    def test_run_missing_arg(self):
        """
@@ -203,9 +217,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
            module.__opts__, {"use_superseded": ["module.run"]}
        ):
            ret = module.run(**{CMD: None})
            self.assertEqual(
                ret["comment"], "'{}' failed: Missing arguments: name".format(CMD)
            )
            self.assertEqual(ret["comment"], f"'{CMD}' failed: Missing arguments: name")

    def test_run_correct_arg(self):
        """
@@ -216,8 +228,8 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
            module.__opts__, {"use_superseded": ["module.run"]}
        ):
            ret = module.run(**{CMD: ["Fred"]})
            if ret["comment"] != "{}: Success".format(CMD) or not ret["result"]:
                self.fail("module.run failed: {}".format(ret))
            if ret["comment"] != f"{CMD}: Success" or not ret["result"]:
                self.fail(f"module.run failed: {ret}")

    def test_run_state_apply_result_false(self):
        """
@@ -294,9 +306,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
        ):
            ret = module.run(**{CMD: ["bla", {"example": "bla"}]})
            self.assertFalse(ret["result"])
            self.assertEqual(
                ret["comment"], "'{}' failed: Missing arguments: arg2".format(CMD)
            )
            self.assertEqual(ret["comment"], f"'{CMD}' failed: Missing arguments: arg2")

    def test_run_42270_kwargs_to_args(self):
        """
@@ -390,9 +400,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
        with patch.dict(module.__salt__, {}, clear=True):
            ret = module._legacy_run(CMD)
            self.assertFalse(ret["result"])
            self.assertEqual(
                ret["comment"], "Module function {} is not available".format(CMD)
            )
            self.assertEqual(ret["comment"], f"Module function {CMD} is not available")

    def test_module_run_test_true(self):
        """
@@ -400,9 +408,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
        """
        with patch.dict(module.__opts__, {"test": True}):
            ret = module._legacy_run(CMD)
            self.assertEqual(
                ret["comment"], "Module function {} is set to execute".format(CMD)
            )
            self.assertEqual(ret["comment"], f"Module function {CMD} is set to execute")

    def test_module_run_missing_arg(self):
        """

@@ -8,7 +8,6 @@
import collections
import compileall
import copy
import imp
import inspect
import logging
import os
@@ -35,15 +34,15 @@ log = logging.getLogger(__name__)

def remove_bytecode(module_path):
    paths = [module_path + "c"]
    if hasattr(imp, "get_tag"):
        modname, ext = os.path.splitext(module_path.split(os.sep)[-1])
        paths.append(
            os.path.join(
                os.path.dirname(module_path),
                "__pycache__",
                "{}.{}.pyc".format(modname, imp.get_tag()),
            )
    cache_tag = sys.implementation.cache_tag
    modname, ext = os.path.splitext(module_path.split(os.sep)[-1])
    paths.append(
        os.path.join(
            os.path.dirname(module_path),
            "__pycache__",
            f"{modname}.{cache_tag}.pyc",
        )
    )
    for path in paths:
        if os.path.exists(path):
            os.unlink(path)
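Context for this hunk: the `imp` module has been deprecated since Python 3.4 and is removed in 3.12, so the `hasattr(imp, "get_tag")` guard gives way to `sys.implementation.cache_tag`, which always exists and names the interpreter-specific `__pycache__` suffix. For example:

    import sys

    # On CPython 3.11 this prints 'cpython-311'; the value varies by interpreter.
    print(sys.implementation.cache_tag)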
@@ -84,9 +83,7 @@ class LazyLoaderTest(TestCase):
        # Setup the module
        self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        self.addCleanup(shutil.rmtree, self.module_dir, ignore_errors=True)
        self.module_file = os.path.join(
            self.module_dir, "{}.py".format(self.module_name)
        )
        self.module_file = os.path.join(self.module_dir, f"{self.module_name}.py")
        with salt.utils.files.fopen(self.module_file, "w") as fh:
            fh.write(salt.utils.stringutils.to_str(loader_template))
            fh.flush()
@@ -163,16 +160,14 @@ class LazyLoaderUtilsTest(TestCase):
    def setUp(self):
        # Setup the module
        self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        self.module_file = os.path.join(
            self.module_dir, "{}.py".format(self.module_name)
        )
        self.module_file = os.path.join(self.module_dir, f"{self.module_name}.py")
        with salt.utils.files.fopen(self.module_file, "w") as fh:
            fh.write(salt.utils.stringutils.to_str(loader_template_module))
            fh.flush()
            os.fsync(fh.fileno())

        self.utils_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        self.utils_file = os.path.join(self.utils_dir, "{}.py".format(self.utils_name))
        self.utils_file = os.path.join(self.utils_dir, f"{self.utils_name}.py")
        with salt.utils.files.fopen(self.utils_file, "w") as fh:
            fh.write(salt.utils.stringutils.to_str(loader_template_utils))
            fh.flush()
@@ -516,7 +511,7 @@ class LazyLoaderSingleItem(TestCase):
        Checks that a KeyError is raised when the function key does not contain a '.'
        """
        key = "testing_no_dot"
        expected = "The key '{}' should contain a '.'".format(key)
        expected = f"The key '{key}' should contain a '.'"
        with self.assertRaises(KeyError) as err:
            inspect.isfunction(self.loader["testing_no_dot"])

@@ -619,7 +614,7 @@ class LazyLoaderReloadingTest(TestCase):

    @property
    def module_path(self):
        return os.path.join(self.tmp_dir, "{}.py".format(self.module_name))
        return os.path.join(self.tmp_dir, f"{self.module_name}.py")

    @pytest.mark.slow_test
    def test_alias(self):
@@ -630,17 +625,15 @@ class LazyLoaderReloadingTest(TestCase):
        self.assertNotIn(self.module_key, self.loader)

        self.update_module()
        self.assertNotIn("{}.test_alias".format(self.module_name), self.loader)
        self.assertNotIn(f"{self.module_name}.test_alias", self.loader)
        self.assertTrue(
            isinstance(
                self.loader["{}.working_alias".format(self.module_name)],
                self.loader[f"{self.module_name}.working_alias"],
                salt.loader.lazy.LoadedFunc,
            )
        )
        self.assertTrue(
            inspect.isfunction(
                self.loader["{}.working_alias".format(self.module_name)].func
            )
            inspect.isfunction(self.loader[f"{self.module_name}.working_alias"].func)
        )

    @pytest.mark.slow_test
@@ -802,7 +795,7 @@ class LazyLoaderVirtualAliasTest(TestCase):

    @property
    def module_path(self):
        return os.path.join(self.tmp_dir, "{}.py".format(self.module_name))
        return os.path.join(self.tmp_dir, f"{self.module_name}.py")

    @pytest.mark.slow_test
    def test_virtual_alias(self):
@@ -1199,7 +1192,7 @@ class LazyLoaderDeepSubmodReloadingTest(TestCase):
                "__salt__": self.minion_mods,
            },
        )
        self.assertIn("{}.top".format(self.module_name), self.loader)
        self.assertIn(f"{self.module_name}.top", self.loader)

    def tearDown(self):
        del self.tmp_dir
@@ -1241,7 +1234,7 @@ class LazyLoaderDeepSubmodReloadingTest(TestCase):

    @pytest.mark.slow_test
    def test_basic(self):
        self.assertIn("{}.top".format(self.module_name), self.loader)
        self.assertIn(f"{self.module_name}.top", self.loader)

    def _verify_libs(self):
        for lib in self.libs:
@@ -1549,9 +1542,7 @@ class LazyLoaderOptimizationOrderTest(TestCase):
        # Setup the module
        self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        self.addCleanup(shutil.rmtree, self.module_dir, ignore_errors=True)
        self.module_file = os.path.join(
            self.module_dir, "{}.py".format(self.module_name)
        )
        self.module_file = os.path.join(self.module_dir, f"{self.module_name}.py")

    def tearDown(self):
        try:
@@ -1585,7 +1576,7 @@ class LazyLoaderOptimizationOrderTest(TestCase):
        return "lazyloadertest.cpython-{}{}{}.pyc".format(
            sys.version_info[0],
            sys.version_info[1],
            "" if not optimize else ".opt-{}".format(optimize),
            "" if not optimize else f".opt-{optimize}",
        )

    def _write_module_file(self):

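The `.opt-{n}` suffix built above comes from PEP 488, which encodes the optimization level into cached bytecode filenames; `importlib.util.cache_from_source()` computes the same names without manual string formatting. A sketch:

    import importlib.util

    print(importlib.util.cache_from_source("lazyloadertest.py"))
    print(importlib.util.cache_from_source("lazyloadertest.py", optimization=1))
    # e.g. __pycache__/lazyloadertest.cpython-311.pyc
    #      __pycache__/lazyloadertest.cpython-311.opt-1.pyc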
@@ -56,6 +56,7 @@ class TransportMethodsTest(TestCase):
            "__format__",
            "__ge__",
            "__getattribute__",
            "__getstate__",
            "__gt__",
            "__hash__",
            "__init__",
@@ -71,9 +72,9 @@ class TransportMethodsTest(TestCase):
            "__sizeof__",
            "__str__",
            "__subclasshook__",
            "destroy",
            "get_method",
            "run_func",
            "destroy",
        ]
        for name in dir(aes_funcs):
            if name in aes_funcs.expose_methods:
@@ -108,6 +109,7 @@ class TransportMethodsTest(TestCase):
            "__format__",
            "__ge__",
            "__getattribute__",
            "__getstate__",
            "__gt__",
            "__hash__",
            "__init__",
@@ -128,9 +130,9 @@ class TransportMethodsTest(TestCase):
            "_prep_pub",
            "_send_pub",
            "_send_ssh_pub",
            "get_method",
            "destroy",
            "connect",
            "destroy",
            "get_method",
        ]
        for name in dir(clear_funcs):
            if name in clear_funcs.expose_methods:

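The new `"__getstate__"` entries track a Python 3.11 change: `object.__getstate__()` became a real default method, so it now shows up in `dir()` for every object and has to be whitelisted when comparing against `expose_methods`. To see it:

    class Dummy:
        pass

    # True on Python >= 3.11, where object.__getstate__ is a default method;
    # False on older interpreters.
    print("__getstate__" in dir(Dummy()))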
@@ -4,6 +4,7 @@ import platform
import posixpath
import sys
import tempfile
import types

import pytest

@@ -40,14 +41,14 @@ class PathJoinTestCase(TestCase):
    def test_nix_paths(self):
        for idx, (parts, expected) in enumerate(self.NIX_PATHS):
            path = salt.utils.path.join(*parts)
            assert "{}: {}".format(idx, path) == "{}: {}".format(idx, expected)
            assert f"{idx}: {path}" == f"{idx}: {expected}"

    @pytest.mark.skip(reason="Skipped until properly mocked")
    @pytest.mark.skip_unless_on_windows
    def test_windows_paths(self):
        for idx, (parts, expected) in enumerate(self.WIN_PATHS):
            path = salt.utils.path.join(*parts)
            assert "{}: {}".format(idx, path) == "{}: {}".format(idx, expected)
            assert f"{idx}: {path}" == f"{idx}: {expected}"

    @pytest.mark.skip(reason="Skipped until properly mocked")
    @pytest.mark.skip_on_windows
@@ -57,7 +58,7 @@ class PathJoinTestCase(TestCase):
        try:
            for idx, (parts, expected) in enumerate(self.WIN_PATHS):
                path = salt.utils.path.join(*parts)
                assert "{}: {}".format(idx, path) == "{}: {}".format(idx, expected)
                assert f"{idx}: {path}" == f"{idx}: {expected}"
        finally:
            self.__unpatch_path()

@@ -79,14 +80,12 @@ class PathJoinTestCase(TestCase):
        assert actual == expected

    def __patch_path(self):
        import imp

        modules = list(self.BUILTIN_MODULES[:])
        modules.pop(modules.index("posix"))
        modules.append("nt")

        code = """'''Salt unittest loaded NT module'''"""
        module = imp.new_module("nt")
        module = types.ModuleType("nt")
        exec(code, module.__dict__)
        sys.modules["nt"] = module

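Final hunk: `imp.new_module()` is replaced by `types.ModuleType`, the supported way to create an empty module object. A minimal standalone version of what `__patch_path` does with it:

    import sys
    import types

    module = types.ModuleType("nt")  # replaces the removed imp.new_module("nt")
    exec("'''Salt unittest loaded NT module'''", module.__dict__)
    sys.modules["nt"] = module
    print(sys.modules["nt"].__doc__)  # Salt unittest loaded NT module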