Mirror of https://github.com/saltstack/salt.git, synced 2025-04-17 10:10:20 +00:00
Merge pull request #64560 from s0undt3ch/hotfix/merge-forward
[master] Merge 3006.x into master
This commit is contained in:
commit 9aeed74e96
49 changed files with 1757 additions and 599 deletions
23  .github/actions/get-pull-labels/action.yml  vendored

@@ -1,23 +0,0 @@
----
-name: get-pull-labels
-description: Get Pull Labels
-inputs:
-  pull-request:
-    type: string
-
-outputs:
-  labels:
-    value: ${{ steps.get-pull-labels.outputs.labels }}
-
-runs:
-  using: composite
-  steps:
-    - name: Get Pull Labels
-      id: get-pull-labels
-      shell: bash
-      env:
-        PULL_REQUEST: ${{ inputs.pull-request }}
-      run: |
-        labels=$(jq -c '[.labels[].name]' <<< $PULL_REQUEST)
-        echo $labels
-        echo "labels=$labels" >> "$GITHUB_OUTPUT"
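
For reference, the whole of the deleted get-pull-labels action reduces to one
jq expression over a pull-request JSON payload. A minimal stand-alone sketch,
with a hypothetical payload in place of the real GitHub API response:

    # Assumed shape: a pull-request object with a .labels array of {name: ...}
    PULL_REQUEST='{"number": 64560, "labels": [{"name": "Test:Full"}]}'
    labels=$(jq -c '[.labels[].name]' <<< "$PULL_REQUEST")
    echo "$labels"                             # -> ["Test:Full"]
    echo "labels=$labels" >> "$GITHUB_OUTPUT"  # publish as a step output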
46  .github/actions/get-pull-number/action.yml  vendored

@@ -1,46 +0,0 @@
-
----
-name: get-pull-number
-description: Get Pull Number
-inputs:
-  owner:
-    type: string
-  repo:
-    type: string
-  sha:
-    type: string
-  pull-number:
-    default: null
-
-outputs:
-  number:
-    value: ${{ steps.get-pull-number.outputs.number }}
-
-runs:
-  using: composite
-  steps:
-    - name: Get Pull Number
-      id: get-pull-number
-      shell: bash
-      env:
-        GITHUB_OWNER: ${{ inputs.owner }}
-        GITHUB_REPO: ${{ inputs.repo }}
-        GITHUB_SHA: ${{ inputs.sha }}
-        GITHUB_PULL_NUMBER: ${{ inputs.pull-number }}
-      run: |
-        if [ -z "$GITHUB_PULL_NUMBER" ]
-        then
-          echo "Searching For Pull Number"
-          echo $GITHUB_OWNER
-          echo $GITHUB_REPO
-          echo $GITHUB_SHA
-          pulls=$(gh api repos/$GITHUB_OWNER/$GITHUB_REPO/commits/$GITHUB_SHA/pulls)
-          echo $pulls
-          full_name=$GITHUB_OWNER/$GITHUB_REPO
-          number=$(jq -c --arg r "$full_name" '[.[] | select(.url | contains($r))][0].number' <<< $pulls )
-        else
-          echo "Given Pull Number"
-          number=$GITHUB_PULL_NUMBER
-        fi
-        echo $number
-        echo "number=$number" >> "$GITHUB_OUTPUT"
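
Outside the workflow, the deleted lookup can be reproduced with an
authenticated gh CLI; OWNER, REPO and SHA below are placeholders:

    OWNER=saltstack REPO=salt SHA=9aeed74e96
    # List the PRs that contain this commit, then keep the first one whose
    # URL points at this repository (the same filter the action used).
    pulls=$(gh api "repos/$OWNER/$REPO/commits/$SHA/pulls")
    number=$(jq -c --arg r "$OWNER/$REPO" \
        '[.[] | select(.url | contains($r))][0].number' <<< "$pulls")
    echo "$number"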
30  .github/actions/get-pull-request/action.yml  vendored

@@ -1,30 +0,0 @@
-
----
-name: get-pull-request
-description: Get Pull Request
-inputs:
-  owner:
-    type: string
-  repo:
-    type: string
-  pull-number:
-    type: number
-
-outputs:
-  pull-request:
-    value: ${{ steps.get-pull-request.outputs.request }}
-
-runs:
-  using: composite
-  steps:
-    - name: Get Pull Request
-      id: get-pull-request
-      shell: bash
-      env:
-        GITHUB_OWNER: ${{ inputs.owner }}
-        GITHUB_REPO: ${{ inputs.repo }}
-        GITHUB_PULL_NUMBER: ${{ inputs.pull-number }}
-      run: |
-        pull=$(gh api repos/$GITHUB_OWNER/$GITHUB_REPO/pulls/$GITHUB_PULL_NUMBER)
-        echo $pull
-        echo "request=$pull" >> "$GITHUB_OUTPUT"
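
This third action was a thin wrapper over a single REST call. A sketch of the
same fetch, reusing the PR number found above:

    pull=$(gh api "repos/$OWNER/$REPO/pulls/$number")
    # Note: $GITHUB_OUTPUT only accepts multi-line values through the
    # name<<DELIMITER heredoc syntax; the deleted action relied on the JSON
    # payload surviving as a single line.
    echo "request=$pull" >> "$GITHUB_OUTPUT"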
152  .github/workflows/ci.yml  vendored

@@ -3,9 +3,16 @@
 ---
 name: CI
+run-name: "CI (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('{0}: {1}', startsWith(github.event.ref, 'refs/tags') && 'tag' || 'branch', github.ref_name) }})"

 on:
   push: {}
-  pull_request: {}
+  pull_request:
+    types:
+      - labeled
+      - unlabeled
+      - opened
+      - reopened
+      - synchronize

 env:
   COLUMNS: 190
@@ -140,35 +147,13 @@ jobs:
          salt-version: ""
          validate-version: true

-      - name: Get Pull Number
-        if: ${{ github.event_name == 'pull_request' }}
-        id: get-pull-number
-        uses: ./.github/actions/get-pull-number
-        with:
-          owner: ${{ github.repository_owner }}
-          repo: ${{ github.event.repository.name }}
-          sha: ${{ github.sha }}
-          pull-number: ${{ github.event.pull_request.number }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Get Pull Request
-        if: ${{ github.event_name == 'pull_request' }}
-        id: get-pull-request
-        uses: ./.github/actions/get-pull-request
-        with:
-          owner: ${{ github.repository_owner }}
-          repo: ${{ github.event.repository.name }}
-          pull-number: ${{ steps.get-pull-number.outputs.number }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Get Pull Labels
-        if: ${{ github.event_name == 'pull_request' }}
-        id: get-pull-labels
-        uses: ./.github/actions/get-pull-labels
-        with:
-          pull-request: ${{ steps.get-pull-request.outputs.pull-request }}
+      - name: Get Pull Request Test Labels
+        id: get-pull-labels
+        if: ${{ github.event_name == 'pull_request'}}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-pr-test-labels --repository ${{ github.repository }}

       - name: Write Changed Files To A Local File
         run:
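
The three composite actions above collapse into one call to Salt's in-repo
`tools` CLI. A rough, hypothetical bash equivalent of what that step achieves
(the actual logic lives in the tools package, not here):

    REPOSITORY=saltstack/salt   # what --repository receives
    PR=64560                    # resolved from the pull_request event payload
    # One authenticated call replaces the number -> request -> labels chain.
    gh api "repos/$REPOSITORY/pulls/$PR" | jq -c '[.labels[].name]'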
@@ -550,7 +535,7 @@ jobs:
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      pkg-type: rpm
      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
-     skip-code-coverage: ${{ github.event_name == 'pull_request' }}
+     skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
      skip-junit-reports: ${{ github.event_name == 'pull_request' }}

  centos-7-pkg-tests:
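
The new expression reads a key out of the JSON `testrun` output that the
prepare-workflow job computes, instead of keying off the event name. The
output's shape is assumed here for illustration only:

    TESTRUN='{"type": "changed", "skip_code_coverage": true}'
    jq -r '.skip_code_coverage' <<< "$TESTRUN"   # -> true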
The same one-line skip-code-coverage substitution repeats for every remaining
package-test job; only the hunk offsets, the pkg-type value, and the job
definition that follows the hunk differ:

@@ -567,7 +552,7 @@ jobs:   pkg-type: rpm    (before centosstream-8-pkg-tests)
@@ -584,7 +569,7 @@ jobs:   pkg-type: rpm    (before centosstream-9-pkg-tests)
@@ -601,7 +586,7 @@ jobs:   pkg-type: rpm    (before debian-10-pkg-tests)
@@ -618,7 +603,7 @@ jobs:   pkg-type: deb    (before debian-11-pkg-tests)
@@ -635,7 +620,7 @@ jobs:   pkg-type: deb    (before debian-11-arm64-pkg-tests)
@@ -652,7 +637,7 @@ jobs:   pkg-type: deb    (before photonos-3-pkg-tests)
@@ -669,7 +654,7 @@ jobs:   pkg-type: rpm    (before photonos-4-pkg-tests)
@@ -686,7 +671,7 @@ jobs:   pkg-type: rpm    (before ubuntu-2004-pkg-tests)
@@ -703,7 +688,7 @@ jobs:   pkg-type: deb    (before ubuntu-2004-arm64-pkg-tests)
@@ -720,7 +705,7 @@ jobs:   pkg-type: deb    (before ubuntu-2204-pkg-tests)
@@ -737,7 +722,7 @@ jobs:   pkg-type: deb    (before ubuntu-2204-arm64-pkg-tests)
@@ -754,7 +739,7 @@ jobs:   pkg-type: deb    (before macos-12-pkg-tests)
@@ -771,7 +756,7 @@ jobs:   pkg-type: macos  (before windows-2016-nsis-pkg-tests)
@@ -788,7 +773,7 @@ jobs:   pkg-type: NSIS   (before windows-2016-msi-pkg-tests)
@@ -805,7 +790,7 @@ jobs:   pkg-type: MSI    (before windows-2019-nsis-pkg-tests)
@@ -822,7 +807,7 @@ jobs:   pkg-type: NSIS   (before windows-2019-msi-pkg-tests)
@@ -839,7 +824,7 @@ jobs:   pkg-type: MSI    (before windows-2022-nsis-pkg-tests)
@@ -856,7 +841,7 @@ jobs:   pkg-type: NSIS   (before windows-2022-msi-pkg-tests)
@@ -873,7 +858,7 @@ jobs:   pkg-type: MSI    (before windows-2016)

The test jobs get the same skip-code-coverage substitution and additionally
drop the pull-labels input; the representative hunk:

@@ -891,8 +876,7 @@ jobs:
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
-     pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
-     skip-code-coverage: ${{ github.event_name == 'pull_request' }}
+     skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
      skip-junit-reports: ${{ github.event_name == 'pull_request' }}

  windows-2019:

It is applied identically, offsets aside, ahead of each remaining test job:

@@ -910,8 +894,7 @@ jobs:    (before windows-2022)
@@ -929,8 +912,7 @@ jobs:    (before macos-12)
@@ -948,8 +930,7 @@ jobs:    (before almalinux-8)
@@ -967,8 +948,7 @@ jobs:    (before almalinux-9)
@@ -986,8 +966,7 @@ jobs:    (before amazonlinux-2)
@@ -1005,8 +984,7 @@ jobs:   (before archlinux-lts)
@@ -1024,8 +1002,7 @@ jobs:  (before centos-7)
@@ -1043,8 +1020,7 @@ jobs:  (before centosstream-8)
@@ -1062,8 +1038,7 @@ jobs:  (before centosstream-9)
@@ -1081,8 +1056,7 @@ jobs:  (before debian-10)
@@ -1100,8 +1074,7 @@ jobs:  (before debian-11)
@@ -1119,8 +1092,7 @@ jobs:  (before debian-11-arm64)
@@ -1138,8 +1110,7 @@ jobs:  (before fedora-37)
@@ -1157,8 +1128,7 @@ jobs:  (before fedora-38)
@@ -1176,8 +1146,7 @@ jobs:  (before opensuse-15)
@@ -1195,8 +1164,7 @@ jobs:  (before photonos-3)
@@ -1214,8 +1182,7 @@ jobs:  (before photonos-4)
@@ -1233,8 +1200,7 @@ jobs:  (before ubuntu-2004)
@@ -1252,8 +1218,7 @@ jobs:  (before ubuntu-2004-arm64)
@@ -1271,8 +1236,7 @@ jobs:  (before ubuntu-2204)
@@ -1290,8 +1254,7 @@ jobs:  (before ubuntu-2204-arm64)
@@ -1309,8 +1272,7 @@ jobs:  (before set-pipeline-exit-status)
57  .github/workflows/nightly.yml  vendored

@@ -193,35 +193,13 @@ jobs:
(the same step replacement as in ci.yml above: the Get Pull Number, Get Pull
Request and Get Pull Labels steps, together with their GITHUB_TOKEN env
blocks, give way to the single Get Pull Request Test Labels step that runs
`tools ci get-pr-test-labels --repository ${{ github.repository }}`; trailing
context is the Write Changed Files To A Local File step)
@@ -950,7 +928,6 @@ jobs:
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
-     pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
      skip-code-coverage: false
      skip-junit-reports: false

The identical pull-labels removal repeats, offsets aside, for every other
test job in this workflow:

@@ -969,7 +946,6 @@ jobs:
@@ -988,7 +964,6 @@ jobs:
@@ -1007,7 +982,6 @@ jobs:
@@ -1026,7 +1000,6 @@ jobs:
@@ -1045,7 +1018,6 @@ jobs:
@@ -1064,7 +1036,6 @@ jobs:
@@ -1083,7 +1054,6 @@ jobs:
@@ -1102,7 +1072,6 @@ jobs:
@@ -1121,7 +1090,6 @@ jobs:
@@ -1140,7 +1108,6 @@ jobs:
@@ -1159,7 +1126,6 @@ jobs:
@@ -1178,7 +1144,6 @@ jobs:
@@ -1197,7 +1162,6 @@ jobs:
@@ -1216,7 +1180,6 @@ jobs:
@@ -1235,7 +1198,6 @@ jobs:
@@ -1254,7 +1216,6 @@ jobs:
@@ -1273,7 +1234,6 @@ jobs:
@@ -1292,7 +1252,6 @@ jobs:
@@ -1311,7 +1270,6 @@ jobs:
@@ -1330,7 +1288,6 @@ jobs:
@@ -1349,7 +1306,6 @@ jobs:
@@ -1368,7 +1324,6 @@ jobs:
57  .github/workflows/scheduled.yml  vendored

@@ -183,35 +183,13 @@ jobs:
(the same step replacement as in ci.yml above, swapping the three pull-label
lookup steps for the single Get Pull Request Test Labels step that runs
`tools ci get-pr-test-labels --repository ${{ github.repository }}`; trailing
context is the Write Changed Files To A Local File step)
@@ -934,7 +912,6 @@ jobs:
      testrun: ${{ needs.prepare-workflow.outputs.testrun }}
      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
-     pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
      skip-code-coverage: false
      skip-junit-reports: false

The identical pull-labels removal repeats, offsets aside, for every other
test job in this workflow:

@@ -953,7 +930,6 @@ jobs:
@@ -972,7 +948,6 @@ jobs:
@@ -991,7 +966,6 @@ jobs:
@@ -1010,7 +984,6 @@ jobs:
@@ -1029,7 +1002,6 @@ jobs:
@@ -1048,7 +1020,6 @@ jobs:
@@ -1067,7 +1038,6 @@ jobs:
@@ -1086,7 +1056,6 @@ jobs:
@@ -1105,7 +1074,6 @@ jobs:
@@ -1124,7 +1092,6 @@ jobs:
@@ -1143,7 +1110,6 @@ jobs:
@@ -1162,7 +1128,6 @@ jobs:
@@ -1181,7 +1146,6 @@ jobs:
@@ -1200,7 +1164,6 @@ jobs:
@@ -1219,7 +1182,6 @@ jobs:
@@ -1238,7 +1200,6 @@ jobs:
@@ -1257,7 +1218,6 @@ jobs:
@@ -1276,7 +1236,6 @@ jobs:
@@ -1295,7 +1254,6 @@ jobs:
@@ -1314,7 +1272,6 @@ jobs:
@@ -1333,7 +1290,6 @@ jobs:
@@ -1352,7 +1308,6 @@ jobs:
57  .github/workflows/staging.yml  vendored

@@ -173,35 +173,13 @@ jobs:
(the same step replacement as in ci.yml above; here the leading context is
`salt-version: "${{ inputs.salt-version }}"` / `validate-version: true`, and
the trailing context is the `- name: Check Existing Releases` step and its
`env:` block rather than Write Changed Files To A Local File)
@ -940,7 +918,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -959,7 +936,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -978,7 +954,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -997,7 +972,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1016,7 +990,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1035,7 +1008,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1054,7 +1026,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1073,7 +1044,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1092,7 +1062,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1111,7 +1080,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1130,7 +1098,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1149,7 +1116,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1168,7 +1134,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1187,7 +1152,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1206,7 +1170,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1225,7 +1188,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1244,7 +1206,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1263,7 +1224,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1282,7 +1242,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1301,7 +1260,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1320,7 +1278,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1339,7 +1296,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
@ -1358,7 +1314,6 @@ jobs:
|
|||
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
|
||||
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
|
||||
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.11.3
|
||||
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
|
||||
skip-code-coverage: true
|
||||
skip-junit-reports: true
|
||||
|
||||
|
6 .github/workflows/templates/ci.yml.jinja vendored

@ -1,12 +1,6 @@
 <%- extends 'layout.yml.jinja' %>

 <%- set pre_commit_version = "3.0.4" %>

-<%- block on %>
-on:
-  push: {}
-  pull_request: {}
-<%- endblock on %>
-
 <%- block jobs %>
 <{- super() }>
40 .github/workflows/templates/layout.yml.jinja vendored

@ -5,7 +5,7 @@
 <%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %>
 <%- set prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %>
 <%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %>
-<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ github.event_name == 'pull_request' }}") %>
+<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}") %>
 <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %>
 <%- set gpg_key_id = "64CBBC8173D76B3F" %>
 <%- set prepare_actual_release = prepare_actual_release | default(False) %>

@ -192,35 +192,13 @@ jobs:
           salt-version: "<{ prepare_workflow_salt_version_input }>"
           validate-version: true

-      - name: Get Pull Number
-        if: ${{ github.event_name == 'pull_request' }}
-        id: get-pull-number
-        uses: ./.github/actions/get-pull-number
-        with:
-          owner: ${{ github.repository_owner }}
-          repo: ${{ github.event.repository.name }}
-          sha: ${{ github.sha }}
-          pull-number: ${{ github.event.pull_request.number }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Get Pull Request
-        if: ${{ github.event_name == 'pull_request' }}
-        id: get-pull-request
-        uses: ./.github/actions/get-pull-request
-        with:
-          owner: ${{ github.repository_owner }}
-          repo: ${{ github.event.repository.name }}
-          pull-number: ${{ steps.get-pull-number.outputs.number }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Get Pull Labels
-        if: ${{ github.event_name == 'pull_request' }}
+      - name: Get Pull Request Test Labels
         id: get-pull-labels
-        uses: ./.github/actions/get-pull-labels
-        with:
-          pull-request: ${{ steps.get-pull-request.outputs.pull-request }}
+        if: ${{ github.event_name == 'pull_request'}}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-pr-test-labels --repository ${{ github.repository }}

 <%- if prepare_actual_release %>

@ -263,7 +241,9 @@ jobs:
       - name: Define Jobs
         id: define-jobs
         run: |
-          tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{ prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite }> ${{ github.event_name }} changed-files.json
+          tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{
+          prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
+          }> ${{ github.event_name }} changed-files.json

       - name: Check Defined Jobs
         run: |
@ -19,7 +19,6 @@
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: <{ skip_test_coverage_check }>
       skip-junit-reports: <{ skip_junit_reports_check }>

@ -43,7 +42,6 @@
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: <{ skip_test_coverage_check }>
       skip-junit-reports: <{ skip_junit_reports_check }>

@ -85,7 +83,6 @@
       testrun: ${{ needs.prepare-workflow.outputs.testrun }}
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
-      pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
       skip-code-coverage: <{ skip_test_coverage_check }>
       skip-junit-reports: <{ skip_junit_reports_check }>
50 .github/workflows/test-action-macos.yml vendored

@ -42,11 +42,6 @@ on:
       type: string
       description: The onedir package name to use
       default: salt
-    pull-labels:
-      required: false
-      type: string
-      description: List of all the pull labels
-      default: '["test:slow", "test:core"]'
     skip-code-coverage:
       required: false
       type: boolean

@ -113,7 +108,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'

@ -226,7 +221,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true

@ -269,36 +264,9 @@ jobs:
         run: |
           sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin"

-      - name: Get Test Flags
-        id: get-test-flags
-        shell: bash
-        env:
-          PULL_LABELS: ${{ inputs.pull-labels }}
-        run: |
-          echo "$PULL_LABELS"
-          # shellcheck disable=SC2086
-          no_fast_tests="$(jq -c '. | any(index("test:no-fast"))' <<< $PULL_LABELS)"
-          # shellcheck disable=SC2086
-          slow_tests="$(jq -c '. | any(index("test:slow"))' <<< $PULL_LABELS)"
-          # shellcheck disable=SC2086
-          core_tests="$(jq -c '. | any(index("test:core"))' <<< $PULL_LABELS)"
-          # shellcheck disable=SC2086
-          flaky_jail_tests="$(jq -c '. | any(index("test:flaky-jail"))' <<< $PULL_LABELS)"
-          echo "$no_fast_tests"
-          echo "$slow_tests"
-          echo "$core_tests"
-          echo "$flaky_jail_tests"
-          # shellcheck disable=SC2086
-          {
-            echo "no_fast_tests=$no_fast_tests";
-            echo "slow_tests=$slow_tests";
-            echo "core_tests=$core_tests";
-            echo "flaky_jail_tests=$flaky_jail_tests";
-          } >> "$GITHUB_OUTPUT"
-
       - name: Run Fast/Changed Tests
         id: run-fast-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"

@ -315,7 +283,7 @@ jobs:

       - name: Run Slow/Changed Tests
         id: run-slow-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"

@ -332,7 +300,7 @@ jobs:

       - name: Run Core/Changed Tests
         id: run-core-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"

@ -349,7 +317,7 @@ jobs:

       - name: Run Fast Tests
         id: run-fast-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"

@ -365,7 +333,7 @@ jobs:

       - name: Run Slow Tests
         id: run-slow-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"

@ -381,7 +349,7 @@ jobs:

       - name: Run Core Tests
         id: run-core-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"

@ -397,7 +365,7 @@ jobs:

       - name: Run Flaky Tests
         id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"
50 .github/workflows/test-action.yml vendored

@ -37,11 +37,6 @@ on:
       type: string
       description: The onedir package name to use
       default: salt
-    pull-labels:
-      required: false
-      type: string
-      description: List of all the pull labels
-      default: '["test:slow", "test:core"]'
     skip-code-coverage:
       required: false
       type: boolean

@ -116,7 +111,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'

@ -247,7 +242,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true

@ -303,36 +298,9 @@ jobs:
           --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \
           ${{ matrix.tests-chunk }}

-      - name: Get Test Flags
-        id: get-test-flags
-        shell: bash
-        env:
-          PULL_LABELS: ${{ inputs.pull-labels }}
-        run: |
-          echo "$PULL_LABELS"
-          # shellcheck disable=SC2086
-          no_fast_tests="$(jq -c '. | any(index("test:no-fast"))' <<< $PULL_LABELS)"
-          # shellcheck disable=SC2086
-          slow_tests="$(jq -c '. | any(index("test:slow"))' <<< $PULL_LABELS)"
-          # shellcheck disable=SC2086
-          core_tests="$(jq -c '. | any(index("test:core"))' <<< $PULL_LABELS)"
-          # shellcheck disable=SC2086
-          flaky_jail_tests="$(jq -c '. | any(index("test:flaky-jail"))' <<< $PULL_LABELS)"
-          echo "$no_fast_tests"
-          echo "$slow_tests"
-          echo "$core_tests"
-          echo "$flaky_jail_tests"
-          # shellcheck disable=SC2086
-          {
-            echo "no_fast_tests=$no_fast_tests";
-            echo "slow_tests=$slow_tests";
-            echo "core_tests=$core_tests";
-            echo "flaky_jail_tests=$flaky_jail_tests";
-          } >> "$GITHUB_OUTPUT"
-
       - name: Run Fast/Changed Tests
         id: run-fast-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \

@ -341,7 +309,7 @@ jobs:

       - name: Run Slow/Changed Tests
         id: run-slow-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \

@ -350,7 +318,7 @@ jobs:

       - name: Run Core/Changed Tests
         id: run-core-changed-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \

@ -359,7 +327,7 @@ jobs:

       - name: Run Fast Tests
         id: run-fast-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.no_fast_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \

@ -367,7 +335,7 @@ jobs:

       - name: Run Slow Tests
         id: run-slow-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.slow_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \

@ -375,7 +343,7 @@ jobs:

       - name: Run Core Tests
         id: run-core-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.core_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \

@ -383,7 +351,7 @@ jobs:

       - name: Run Flaky Tests
         id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }}
+        if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
           --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
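Editor's note: the rewritten `if:` expressions in the two workflows above index into the `testrun` JSON that `tools ci define-jobs` publishes through the `prepare-workflow` outputs. Below is a minimal sketch of the payload shape those expressions assume; the key names are inferred from the expressions themselves, and the authoritative schema lives in the `tools` package, not in this diff:

    # Hypothetical illustration only: the subset of the testrun payload that
    # the fromJSON(inputs.testrun) expressions consume.
    testrun = {
        "type": "changed",           # "full" bypasses the label-selected steps
        "skip_code_coverage": True,  # feeds skip_test_coverage_check in layout.yml.jinja
        "selected_tests": {          # one flag per "Run ... Tests" step
            "fast": False,
            "slow": False,
            "core": False,
            "flaky": False,
        },
    }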
@ -78,7 +78,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         uses: actions/download-artifact@v3

@ -184,7 +184,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true

@ -80,7 +80,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         uses: actions/download-artifact@v3

@ -184,7 +184,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true

@ -83,7 +83,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         uses: actions/download-artifact@v3

@ -189,7 +189,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true

@ -96,7 +96,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'

@ -213,7 +213,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true
4 .github/workflows/test-packages-action.yml vendored

@ -97,7 +97,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}

       - name: Download Onedir Tarball as an Artifact
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'

@ -227,7 +227,7 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}
           # If we get a cache miss here it means the dependencies step failed to save the cache
           fail-on-cache-miss: true
1 changelog/58667.fixed.md Normal file

@ -0,0 +1 @@
fixes aptpkg module by checking for blank comps.

1 changelog/61173.fixed.md Normal file

@ -0,0 +1 @@
fixed runner not having a proper exit code when runner modules throw an exception.

1 changelog/64211.fixed.md Normal file

@ -0,0 +1 @@
Fix user.present state when groups is unset to ensure the groups are unchanged, as documented.

1 changelog/64439.added.md Normal file

@ -0,0 +1 @@
Added a script to automate setting up a 2nd minion in a user context on Windows

1 changelog/64514.fixed.md Normal file

@ -0,0 +1 @@
Fixed salt-ssh state.* commands returning retcode 0 when state/pillar rendering fails

1 changelog/64539.fixed.md Normal file

@ -0,0 +1 @@
Added a `test:full` label in the salt repository, which, when selected, will force a full test run.

8 changelog/64547.added.md Normal file

@ -0,0 +1,8 @@
Several fixes to the CI workflow:

* Don't override the `on` Jinja block on the `ci.yaml` template. This enables reacting to labels getting added/removed
  to/from pull requests.
* Switch to using `tools` and re-use the event payload available instead of querying the GH API again to get the pull
  request labels
* Concentrate test selection by labels to a single place
* Enable code coverage on pull-requests by setting the `test:coverage` label

1 changelog/64553.changed.md Normal file

@ -0,0 +1 @@
Don't hardcode the python version on the Salt Package tests and on the `pkg/debian/salt-cloud.postinst` file

6 changelog/64553.removed.md Normal file

@ -0,0 +1,6 @@
Handle deprecation warnings:

* Switch to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0
* Stop using the deprecated `cgi` module
* Stop using the deprecated `pipes` module
* Stop using the deprecated `imp` module
171 doc/topics/windows/multi-minion.rst Normal file

@ -0,0 +1,171 @@
=============================
Multi-minion setup on Windows
=============================

There may be a scenario where having a minion running in the context of the
current, logged-in user would be useful. For example, the normal minion running
under the service account would perform machine-wide, administrative tasks. The
minion running under the user context could be launched when the user logs in
and would be able to perform configuration tasks as if it were the user itself.

The steps required to do this are as follows:

1. Create new root_dir
2. Set root_dir permissions
3. Create directory structure
4. Write minion config
5. Start the minion
6. Register the minion as a service (optional)

.. note::

    The Salt Project has created a PowerShell script that will configure an
    additional minion on the system for you. It can be found in the root of the
    Salt installation. The script is named ``multi-minion.ps1``. You can get
    help on how to use the script by running the following in a PowerShell
    prompt:

    ``Get-Help .\multi-minion.ps1 -Detailed``

The following guide explains these steps in more detail.

1. Create new ``root_dir``
--------------------------

The minion requires a root directory to store config, cache, logs, etc. The user
must have full permissions to this directory. The easiest way to do this is to
put the ``root_dir`` in the Local AppData directory (``$env:LocalAppData``).

.. code-block:: powershell

    New-Item -Path "$env:LocalAppData\Salt Project\Salt" -Type Directory

2. Set ``root_dir`` permissions
-------------------------------

The user running Salt requires full access to the ``root_dir``. If you have
placed the root_dir in a location that the user does not have access to, you'll
need to give the user full permissions to that directory. Replace the
<placeholder variables> in this example with your own configuration information.

.. code-block:: powershell

    $RootDir = "<new root_dir location>"
    $User = "<user running salt>"
    $acl = Get-Acl -Path "$RootDir"
    $access_rule = New-Object System.Security.AccessControl.FileSystemAccessRule($User, "Modify", "Allow")
    $acl.AddAccessRule($access_rule)
    Set-Acl -Path "$RootDir" -AclObject $acl

3. Create directory structure
-----------------------------

Salt expects a certain directory structure to be present to avoid unnecessary
messages in the logs. This is usually handled by the installer. Since you're
running your own instance, you need to do it. Make sure the following
directories are present:

- root_dir\\conf\\minion.d
- root_dir\\conf\\pki
- root_dir\\var\\log\\salt
- root_dir\\var\\run
- root_dir\\var\\cache\\salt\\minion\\extmods\\grains
- root_dir\\var\\cache\\salt\\minion\\proc

.. code-block:: powershell

    $RootDir = "<new root_dir location>"
    $cache_dir = "$RootDir\var\cache\salt\minion"
    New-Item -Path "$RootDir\conf" -Type Directory
    New-Item -Path "$RootDir\conf\minion.d" -Type Directory
    New-Item -Path "$RootDir\conf\pki" -Type Directory
    New-Item -Path "$RootDir\var\log\salt" -Type Directory
    New-Item -Path "$RootDir\var\run" -Type Directory
    New-Item -Path "$cache_dir\extmods\grains" -Type Directory
    New-Item -Path "$cache_dir\proc" -Type Directory

4. Write minion config
----------------------

The minion will need its own config, separate from the system minion config.
This config tells the minion where everything is located in the file structure
and also defines the master and minion id. Create a minion config file named
``minion`` in the conf directory.

.. code-block:: powershell

    New-Item -Path "$env:LocalAppData\Salt Project\Salt\conf\minion" -Type File

Make sure the config file has at least the following contents:

.. code-block:: yaml

    master: <ip address, dns name, etc>
    id: <minion id>

    root_dir: <root_dir>
    log_file: <root_dir>\var\log\salt\minion
    utils_dirs:
      - <root_dir>\var\cache\salt\minion\extmods
    winrepo_dir: <root_dir>\srv\salt\win\repo
    winrepo_dir_ng: <root_dir>\srv\salt\win\repo-ng

    file_roots:
      base:
        - <root_dir>\srv\salt
        - <root_dir>\srv\spm\salt

    pillar_roots:
      base:
        - <root_dir>\srv\pillar
        - <root_dir>\srv\spm\pillar

    thorium_roots:
      base:
        - <root_dir>\srv\thorium

5. Run the minion
-----------------

Everything is now set up to run the minion. You can start the minion as you
would normally, but you need to specify the full path to the config file you
created above.

.. code-block:: powershell

    salt-minion.exe -c <root_dir>\conf

6. Register the minion as a service (optional)
----------------------------------------------

You can also register the minion as a service, but you need to understand the
implications of doing so.

- You will need to have administrator privileges to register this minion
  service.
- You will need the password to the user account that will be running the
  minion.
- If the user password changes, you will have to update the service definition
  to reflect the new password.
- The minion runs all the time under the user context, whether that user is
  logged in or not.
- This requires great trust from the user as the minion will be able to perform
  operations under the user's name without the user knowing, whether they are
  logged in or not.
- If you decide to run the new minion under the Local System account, it might
  as well just be a normal minion.
- The helper script does not support registering the second minion as a service.

To register the minion as a service, use the ``ssm.exe`` binary that came with
the Salt installation. Run the following commands, replacing ``<service-name>``,
``<root_dir>``, ``<user_name>``, and ``<password>`` as necessary:

.. code-block:: powershell

    ssm.exe install <service-name> "salt-minion.exe" "-c `"<root_dir>\conf`" -l quiet"
    ssm.exe set <service-name> Description "Salt Minion <user_name>"
    ssm.exe set <service-name> Start SERVICE_AUTO_START
    ssm.exe set <service-name> AppStopMethodConsole 24000
    ssm.exe set <service-name> AppStopMethodWindow 2000
    ssm.exe set <service-name> AppRestartDelay 60000
    ssm.exe set <service-name> ObjectName ".\<user_name>" "<password>"
121 pkg/tests/integration/test_multi_minion.py Normal file

@ -0,0 +1,121 @@
import os
import pathlib
import subprocess

import psutil
import pytest

pytestmark = [
    pytest.mark.skip_unless_on_windows,
]


@pytest.fixture
def mm_script(install_salt):
    yield install_salt.ssm_bin.parent / "multi-minion.ps1"


@pytest.fixture(scope="function")
def mm_conf(mm_script):
    yield pathlib.Path(os.getenv("LocalAppData"), "Salt Project", "Salt", "conf")
    subprocess.run(
        ["powershell", str(mm_script).replace(" ", "' '"), "-d"],
        capture_output=True,
        check=False,
        text=True,
    )


def test_script_present(mm_script):
    """
    Ensure the multi-minion.ps1 file is present in the root of the installation
    """
    assert mm_script.exists()


def test_install(mm_script, mm_conf):
    """
    Install a second minion with default settings. Should create a minion config
    file in Local AppData
    """
    ret = subprocess.run(
        ["powershell", str(mm_script).replace(" ", "' '")],
        capture_output=True,
        check=False,
        text=True,
    )
    assert ret.returncode == 0, ret.stderr
    conf_file = mm_conf / "minion"
    assert conf_file.exists()
    assert conf_file.read_text().find("master: salt") > -1


def test_install_master(mm_script, mm_conf):
    """
    Install a second minion and set the master to spongebob
    """
    ret = subprocess.run(
        ["powershell", str(mm_script).replace(" ", "' '"), "-m", "spongebob"],
        capture_output=True,
        check=False,
        text=True,
    )
    assert ret.returncode == 0, ret.stderr
    conf_file = mm_conf / "minion"
    assert conf_file.exists()
    assert conf_file.read_text().find("master: spongebob") > -1


def test_install_prefix(mm_script, mm_conf):
    """
    Install a second minion and add a prefix to the minion id
    """
    ret = subprocess.run(
        ["powershell", str(mm_script).replace(" ", "' '"), "-p", "squarepants"],
        capture_output=True,
        check=False,
        text=True,
    )
    assert ret.returncode == 0, ret.stderr
    conf_file = mm_conf / "minion"
    assert conf_file.exists()
    assert conf_file.read_text().find("id: squarepants") > -1


def test_install_log_level(mm_script, mm_conf):
    """
    Install a second minion and set the log level in the log file to debug
    """
    ret = subprocess.run(
        ["powershell", str(mm_script).replace(" ", "' '"), "-l", "debug"],
        capture_output=True,
        check=False,
        text=True,
    )
    assert ret.returncode == 0, ret.stderr
    conf_file = mm_conf / "minion"
    assert conf_file.exists()
    assert conf_file.read_text().find("log_level_logfile: debug") > -1


def test_install_start(mm_script, mm_conf):
    """
    Install a second minion and start that minion in a hidden process
    """
    ret = subprocess.run(
        ["powershell", str(mm_script).replace(" ", "' '"), "-s"],
        capture_output=True,
        check=False,
        text=True,
    )
    assert ret.returncode == 0, ret.stderr
    conf_file = mm_conf / "minion"
    assert conf_file.exists()
    assert conf_file.read_text().find("master: salt") > -1

    found = False
    for p in psutil.process_iter(["cmdline", "name"]):
        if p.info["name"] and p.info["name"] == "salt-minion.exe":
            if f"{mm_conf}" in p.info["cmdline"]:
                found = True
    assert found is True
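Editor's note: the repeated str(mm_script).replace(" ", "' '") in these tests quotes a path that contains spaces (the script lives under "Salt Project") so PowerShell receives it as a single argument. A small sketch of the effect, with a hypothetical path:

    # Illustration only: how the space-escaping used by the tests behaves.
    path = r"C:\Program Files\Salt Project\Salt\multi-minion.ps1"
    quoted = path.replace(" ", "' '")
    # PowerShell re-joins the single-quoted fragments into one token:
    # C:\Program' 'Files\Salt' 'Project\Salt\multi-minion.ps1
    print(quoted)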
@ -140,6 +140,33 @@ if ( Test-Path -Path "$RELENV_DIR" ) {
     }
 }

+#-------------------------------------------------------------------------------
+# Remove MSI build files
+#-------------------------------------------------------------------------------
+$files = @(
+    "msi/CustomAction01/CustomAction01.CA.dll",
+    "msi/CustomAction01/CustomAction01.dll",
+    "msi/CustomAction01/CustomAction01.pdb",
+    "msi/Product-discovered-files-config.wixobj",
+    "msi/Product-discovered-files-config.wxs",
+    "msi/Product-discovered-files-x64.wixobj",
+    "msi/Product-discovered-files-x64.wxs",
+    "msi/Product.wixobj"
+)
+$files | ForEach-Object {
+    if ( Test-Path -Path "$SCRIPT_DIR\$_" ) {
+        # Use .net, the powershell function is asynchronous
+        Write-Host "Removing $_`: " -NoNewline
+        [System.IO.File]::Delete("$SCRIPT_DIR\$_")
+        if ( ! (Test-Path -Path "$SCRIPT_DIR\$_") ) {
+            Write-Result "Success" -ForegroundColor Green
+        } else {
+            Write-Result "Failed" -ForegroundColor Red
+            exit 1
+        }
+    }
+}
+
 #-------------------------------------------------------------------------------
 # Script Completed
 #-------------------------------------------------------------------------------
5 pkg/windows/multi-minion.cmd Normal file

@ -0,0 +1,5 @@
:: This is a helper script for multi-minion.ps1.
:: See multi-minion.ps1 for documentation
@ echo off
Set "CurDir=%~dp0"
PowerShell -ExecutionPolicy RemoteSigned -File "%CurDir%\multi-minion.ps1" %*
363 pkg/windows/multi-minion.ps1 Normal file

@ -0,0 +1,363 @@
<#
.SYNOPSIS
Script for setting up an additional salt-minion on a machine with Salt installed

.DESCRIPTION
This script configures an additional minion on a machine that already has a Salt
installation using one of the Salt packages. It sets up the directory structure
required by Salt. It also lays down a minion config to be used
by the Salt minion. Additionally, this script can start the new minion in a
hidden window.

You can also remove the multi-minion setup with this script.

This script does not need to be run with Administrator privileges

If a minion that was configured with this script is already running, the script
will exit.

The following example sets up a minion for the current logged in account. It
configures the minion to connect to the master at 192.168.0.10

.EXAMPLE
PS>multi-minion.ps1 -Master 192.168.0.10
PS>multi-minion.ps1 -m 192.168.0.10

The following example sets up a minion for the current logged in account. It
configures the minion to connect to the master at 192.168.0.10. It also prefixes
the minion id with `spongebob`

.EXAMPLE
PS>multi-minion.ps1 -Master 192.168.0.10 -Prefix spongebob
PS>multi-minion.ps1 -m 192.168.0.10 -p spongebob

The following example sets up a minion for the current logged in account. It
configures the minion to connect to the master at 192.168.0.10. It also starts
the minion in a hidden window:

.EXAMPLE
PS>multi-minion.ps1 -Master 192.168.0.10 -Start
PS>multi-minion.ps1 -m 192.168.0.10 -s

The following example removes a multi-minion for the current running account:

.EXAMPLE
PS>multi-minion.ps1 -Remove
PS>multi-minion.ps1 -d

#>

[CmdletBinding()]
param(

    [Parameter(Mandatory=$false)]
    [Alias("m")]
    # The master to connect to. This can be an ip address or an fqdn. Default
    # is salt
    [String] $Master = "salt",

    [Parameter(Mandatory=$false)]
    [Alias("p")]
    # The prefix to the minion id to differentiate it from the installed system
    # minion. The default is $env:COMPUTERNAME. It might be helpful to use the
    # minion id of the system minion if you know it
    [String] $Prefix = "$env:COMPUTERNAME",

    [Parameter(Mandatory=$false)]
    [Alias("s")]
    # Start the minion in the background
    [Switch] $Start,

    [Parameter(Mandatory=$false)]
    [Alias("l")]
    [ValidateSet(
        "all",
        "garbage",
        "trace",
        "debug",
        "profile",
        "info",
        "warning",
        "error",
        "critical",
        "quiet"
    )]
    # Set the log level for log file. Default is `warning`
    [String] $LogLevel = "warning",

    [Parameter(Mandatory=$false)]
    [Alias("d")]
    # Remove the multi-minion in the current account. All other parameters are
    # ignored
    [Switch] $Remove
)

########################### Script Variables #############################
$user_name = [System.Security.Principal.WindowsIdentity]::GetCurrent().Name.Split("\")[-1].ToLower()
$salt_bin = "$env:ProgramFiles\Salt Project\Salt\salt-minion.exe"
$root_dir = "$env:LocalAppData\Salt Project\Salt"
$cache_dir = "$root_dir\var\cache\salt\minion"
$minion_id = "$Prefix-$user_name"

########################### Script Functions #############################
function Test-FileLock {
    param (
        [parameter(Mandatory=$true)]
        # The path to the file to check
        [string]$Path
    )
    if ((Test-Path -Path $Path) -eq $false) {
        return $false
    }
    $oFile = New-Object System.IO.FileInfo $Path
    try {
        $oStream = $oFile.Open([System.IO.FileMode]::Open, [System.IO.FileAccess]::ReadWrite, [System.IO.FileShare]::None)
        if ($oStream) {
            $oStream.Close()
        }
        return $false
    } catch {
        # file is locked by a process.
        return $true
    }
}

################################ Remove ##################################
if ( $Remove ) {
    Write-Host "######################################################################" -ForegroundColor Cyan
    Write-Host "Removing multi-minion"
    Write-Host "Root Dir: $root_dir"
    Write-Host "######################################################################" -ForegroundColor Cyan

    # Stop salt-minion service if running
    $processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle
    $processes | ForEach-Object {
        if ( $_.commandline -like "*$root_dir*" ) {
            Write-Host "Killing process: " -NoNewline
            $process = Get-Process -Id $_.handle
            $process.Kill()
            if ( $process.HasExited ) {
                Write-Host "Success" -ForegroundColor Green
            } else {
                Write-Host "Failed" -ForegroundColor Red
                exit 1
            }
        }
    }

    # Check for locked log file
    # The log file will be locked until the running process releases it
    while (Test-FileLock -Path "$root_dir\var\log\salt\minion") {
        Start-Sleep -Seconds 1
    }

    # Remove Directory
    if ( Test-Path -Path $root_dir) {
        Write-Host "Removing Root Dir: " -NoNewline
        Remove-Item -Path $root_dir -Force -Recurse

        if ( !(Test-Path -Path $root_dir) ) {
            Write-Host "Success" -ForegroundColor Green
        } else {
            Write-Host "Failed" -ForegroundColor Red
            exit 1
        }
    }
    # Remind to delete keys from master
    Write-Host "######################################################################" -ForegroundColor Cyan
    Write-Host "Multi-Minion successfully removed"
    Write-Host ">>>>> Don't forget to remove keys from the master <<<<<"
    Write-Host "######################################################################" -ForegroundColor Cyan
    exit 0
}

################################ EXISTING CHECK ################################

# See if there is already a running minion
$running = $false
$processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle
$processes | ForEach-Object {
    if ( $_.commandline -like "*$root_dir*" ) {
        $running = $true
    }
}
if ( $running ) {
    Write-Host "######################################################################" -ForegroundColor Cyan
    Write-Host "Multi-Minion"
    Write-Host "A minion is already running for this user"
    Write-Host "######################################################################" -ForegroundColor Cyan
    exit 0
}

################################### INSTALL ####################################

Write-Host "######################################################################" -ForegroundColor Cyan
Write-Host "Installing Multi-Minion"
Write-Host "Master: $Master"
Write-Host "Minion ID: $minion_id"
Write-Host "Root Directory: $root_dir"
Write-Host "######################################################################" -ForegroundColor Cyan

# Create Root Directory Structure
if ( !( Test-Path -path "$root_dir" ) ) {
    Write-Host "Creating Root Dir: " -NoNewline
    New-Item -Path "$root_dir" -Type Directory | Out-Null
    if ( Test-Path -path "$root_dir" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Config dir
if ( !( Test-Path -path "$root_dir\conf" ) ) {
    Write-Host "Creating config dir: " -NoNewline
    New-Item -Path "$root_dir\conf" -Type Directory | Out-Null
    if ( Test-Path -path "$root_dir\conf" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Minion.d dir
if ( !( Test-Path -path "$root_dir\conf\minion.d" ) ) {
    Write-Host "Creating minion.d dir: " -NoNewline
    New-Item -Path "$root_dir\conf\minion.d" -Type Directory | Out-Null
    if ( Test-Path -path "$root_dir\conf\minion.d" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# PKI dir
if ( !( Test-Path -path "$root_dir\conf\pki" ) ) {
    Write-Host "Creating pki dir: " -NoNewline
    New-Item -Path "$root_dir\conf\pki" -Type Directory | Out-Null
    if ( Test-Path -path "$root_dir\conf\pki" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Log dir
if ( !( Test-Path -path "$root_dir\var\log\salt" ) ) {
    Write-Host "Creating log dir: " -NoNewline
    New-Item -Path "$root_dir\var\log\salt" -Type Directory | Out-Null
    if ( Test-Path -path "$root_dir\var\log\salt" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Run dir
if ( !( Test-Path -path "$root_dir\var\run" ) ) {
    Write-Host "Creating run dir: " -NoNewline
    New-Item -Path "$root_dir\var\run" -Type Directory | Out-Null
    if ( Test-Path -path "$root_dir\var\run" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Extmods grains dir
if ( !( Test-Path -path "$cache_dir\extmods\grains" ) ) {
    Write-Host "Creating extmods grains dir: " -NoNewline
    New-Item -Path "$cache_dir\extmods\grains" -Type Directory | Out-Null
    if ( Test-Path -path "$cache_dir\extmods\grains" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Proc dir
if ( !( Test-Path -path "$cache_dir\proc" ) ) {
    Write-Host "Creating proc dir: " -NoNewline
    New-Item -Path "$cache_dir\proc" -Type Directory | Out-Null
    if ( Test-Path -path "$cache_dir\proc" ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

# Write minion config
Write-Host "Writing minion config: " -NoNewline
Set-Content -Force -Path "$root_dir\conf\minion" -Value "master: $Master"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "id: $minion_id"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "root_dir: $root_dir"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "log_file: $root_dir\var\log\salt\minion"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "log_level_logfile: $LogLevel"

Add-Content -Force -Path "$root_dir\conf\minion" -Value "utils_dirs:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "  - $root_dir\var\cache\salt\minion\extmods\utils"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "winrepo_dir: $root_dir\srv\salt\win\repo"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "winrepo_dir_ng: $root_dir\srv\salt\win\repo-ng"

Add-Content -Force -Path "$root_dir\conf\minion" -Value "file_roots:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "  base:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "    - $root_dir\srv\salt"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "    - $root_dir\srv\spm\salt"

Add-Content -Force -Path "$root_dir\conf\minion" -Value "pillar_roots:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "  base:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "    - $root_dir\srv\pillar"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "    - $root_dir\srv\spm\pillar"

Add-Content -Force -Path "$root_dir\conf\minion" -Value "thorium_roots:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "  base:"
Add-Content -Force -Path "$root_dir\conf\minion" -Value "    - $root_dir\srv\thorium"

if ( Test-Path -path "$root_dir\conf\minion" ) {
    Write-Host "Success" -ForegroundColor Green
} else {
    Write-Host "Failed" -ForegroundColor Red
    exit 1
}

# Start the minion
if ( $Start ) {
    Write-Host "Starting minion process: " -NoNewline
    Start-Process -FilePath "`"$salt_bin`"" `
                  -ArgumentList "-c","`"$root_dir\conf`"" `
                  -WindowStyle Hidden
    # Verify running minion
    $running = $false
    $processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle
    $processes | ForEach-Object {
        if ( $_.commandline -like "*$root_dir*" ) {
            $running = $true
        }
    }
    if ( $running ) {
        Write-Host "Success" -ForegroundColor Green
    } else {
        Write-Host "Failed" -ForegroundColor Red
        exit 1
    }
}

Write-Host "######################################################################" -ForegroundColor Cyan
Write-Host "Multi-Minion installed successfully"
if ( ! $Start ) {
    Write-Host ""
    Write-Host "To start the minion, run the following command:"
    Write-Host "salt-minion -c `"$root_dir\conf`""
    Write-Host ""
    Write-Host "To start the minion in the background, run the following command:"
    Write-Host "Start-Process -FilePath salt-minion.exe -ArgumentList `"-c`",'`"$root_dir\conf`"' -WindowStyle Hidden"
}
Write-Host "######################################################################" -ForegroundColor Cyan
@@ -165,6 +165,25 @@ if ( ! (Test-Path -Path "$BUILD_DIR\ssm.exe") ) {
    }
}

# Copy the multiminion scripts to the Build directory
$scripts = @(
    "multi-minion.cmd",
    "multi-minion.ps1"
)
$scripts | ForEach-Object {
    if (!(Test-Path -Path "$BUILD_DIR\$_")) {
        Write-Host "Copying $_ to the Build directory: " -NoNewline
        Copy-Item -Path "$SCRIPT_DIR\$_" -Destination "$BUILD_DIR\$_"
        if (Test-Path -Path "$BUILD_DIR\$_") {
            Write-Result "Success" -ForegroundColor Green
        } else {
            Write-Result "Failed" -ForegroundColor Red
            exit 1
        }
    }
}

# Copy VCRedist 2013 to the prereqs directory
New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null
Write-Host "Copying VCRedist 2013 $ARCH_X to prereqs: " -NoNewline
$file = "vcredist_$ARCH_X`_2013.exe"

@@ -176,6 +195,7 @@ if ( Test-Path -Path "$PREREQ_DIR\$file" ) {
    exit 1
}

# Copy Universal C Runtimes to the prereqs directory
Write-Host "Copying Universal C Runtimes $ARCH_X to prereqs: " -NoNewline
$file = "ucrt_$ARCH_X.zip"
Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file"
@@ -31,13 +31,19 @@ class SaltRun(salt.utils.parsers.SaltRunOptionParser):
        if check_user(self.config["user"]):
            pr = salt.utils.profile.activate_profile(profiling_enabled)
            try:
                ret = runner.run()
                ret = runner.run(full_return=True)
                # In older versions ret['data']['retcode'] was used
                # for signaling the return code. This has been
                # changed for the orchestrate runner, but external
                # runners might still use it. For this reason, we
                # also check ret['data']['retcode'] if
                # ret['retcode'] is not available.
                if (
                    isinstance(ret, dict)
                    and "return" in ret
                    and "retcode" not in ret
                ):
                    ret = ret["return"]
                if isinstance(ret, dict) and "retcode" in ret:
                    self.exit(ret["retcode"])
                elif isinstance(ret, dict) and "retcode" in ret.get("data", {}):
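The retcode fallback in this hunk can be read in isolation. Below is a minimal sketch of the same resolution order; the dict shapes are assumptions inferred from the comments above, not an exact copy of the Salt API:

    def resolve_retcode(ret):
        # A full_return payload nests the real result under "return"
        # when no top-level retcode was set.
        if isinstance(ret, dict) and "return" in ret and "retcode" not in ret:
            ret = ret["return"]
        if isinstance(ret, dict) and "retcode" in ret:
            return ret["retcode"]
        # Older runners signaled the return code via ret["data"]["retcode"].
        if isinstance(ret, dict) and "retcode" in ret.get("data", {}):
            return ret["data"]["retcode"]
        return 0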
@@ -272,7 +272,7 @@ class SyncClientMixin(ClientStateMixin):
            return True

        try:
            return self.opts["{}_returns".format(class_name)]
            return self.opts[f"{class_name}_returns"]
        except KeyError:
            # No such option, assume this isn't one we care about gating and
            # just return True.

@@ -299,7 +299,7 @@ class SyncClientMixin(ClientStateMixin):
        tag = low.get("__tag__", salt.utils.event.tagify(jid, prefix=self.tag_prefix))

        data = {
            "fun": "{}.{}".format(self.client, fun),
            "fun": f"{self.client}.{fun}",
            "jid": jid,
            "user": low.get("__user__", "UNKNOWN"),
        }

@@ -407,6 +407,7 @@ class SyncClientMixin(ClientStateMixin):
                traceback.format_exc(),
            )
            data["success"] = False
            data["retcode"] = 1

        if self.store_job:
            try:
@@ -507,7 +508,17 @@ class AsyncClientMixin(ClientStateMixin):

    @classmethod
    def _proc_function(
        cls, *, instance, opts, fun, low, user, tag, jid, daemonize=True
        cls,
        *,
        instance,
        opts,
        fun,
        low,
        user,
        tag,
        jid,
        daemonize=True,
        full_return=False,
    ):
        """
        Run this method in a multiprocess target to execute the function

@@ -532,7 +543,7 @@ class AsyncClientMixin(ClientStateMixin):
        low["__user__"] = user
        low["__tag__"] = tag

        return instance.low(fun, low)
        return instance.low(fun, low, full_return=full_return)

    def cmd_async(self, low):
        """
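A side note on the signature style above: every parameter after the bare `*` is keyword-only, so adding full_return=False cannot break or silently reorder existing call sites. A generic illustration of the pattern (not Salt's actual API):

    def _proc_function(*, fun, low, daemonize=True, full_return=False):
        # Callers must name each argument explicitly.
        return (fun, low, daemonize, full_return)

    _proc_function(fun="test.ping", low={}, full_return=True)  # OK
    # _proc_function("test.ping", {})  # TypeError: positional args rejected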
@@ -304,7 +304,7 @@ class SSH(MultiprocessingStateMixin):
        }
        if self.opts.get("rand_thin_dir"):
            self.defaults["thin_dir"] = os.path.join(
                "/var/tmp", ".{}".format(uuid.uuid4().hex[:6])
                "/var/tmp", f".{uuid.uuid4().hex[:6]}"
            )
            self.opts["ssh_wipe"] = "True"
        self.returners = salt.loader.returners(self.opts, {})

@@ -454,9 +454,9 @@ class SSH(MultiprocessingStateMixin):
        priv = self.opts.get(
            "ssh_priv", os.path.join(self.opts["pki_dir"], "ssh", "salt-ssh.rsa")
        )
        pub = "{}.pub".format(priv)
        pub = f"{priv}.pub"
        with salt.utils.files.fopen(pub, "r") as fp_:
            return "{} rsa root@master".format(fp_.read().split()[1])
            return f"{fp_.read().split()[1]} rsa root@master"

    def key_deploy(self, host, ret):
        """

@@ -500,7 +500,7 @@ class SSH(MultiprocessingStateMixin):
            mods=self.mods,
            fsclient=self.fsclient,
            thin=self.thin,
            **target
            **target,
        )
        if salt.utils.path.which("ssh-copy-id"):
            # we have ssh-copy-id, use it!

@@ -516,7 +516,7 @@ class SSH(MultiprocessingStateMixin):
            mods=self.mods,
            fsclient=self.fsclient,
            thin=self.thin,
            **target
            **target,
        )
        stdout, stderr, retcode = single.cmd_block()
        try:

@@ -543,7 +543,7 @@ class SSH(MultiprocessingStateMixin):
            fsclient=self.fsclient,
            thin=self.thin,
            mine=mine,
            **target
            **target,
        )
        ret = {"id": single.id}
        stdout, stderr, retcode = single.run()

@@ -552,6 +552,11 @@ class SSH(MultiprocessingStateMixin):
            data = salt.utils.json.find_json(stdout)
            if len(data) < 2 and "local" in data:
                ret["ret"] = data["local"]
                try:
                    # Ensure a reported local retcode is kept
                    retcode = data["local"]["retcode"]
                except (KeyError, TypeError):
                    pass
            else:
                ret["ret"] = {
                    "stdout": stdout,

@@ -564,7 +569,7 @@ class SSH(MultiprocessingStateMixin):
                    "stderr": stderr,
                    "retcode": retcode,
                }
        que.put(ret)
        que.put((ret, retcode))

    def handle_ssh(self, mine=False):
        """
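The queue now carries (result, retcode) tuples instead of bare result dicts, so every consumer must unpack both values. A minimal illustration of that producer/consumer contract; the payload values here are illustrative, not the exact Salt data:

    import queue

    que = queue.Queue()
    # Producer side: pair each host result with its exit code.
    que.put(({"id": "host1", "ret": {"retcode": 0}}, 0))

    # Consumer side: unpack both values; queue.Empty ends the drain loop.
    try:
        while True:
            ret, retcode = que.get(False)
    except queue.Empty:
        pass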
@@ -608,7 +613,7 @@ class SSH(MultiprocessingStateMixin):
                    "fun": "",
                    "id": host,
                }
                yield {host: no_ret}
                yield {host: no_ret}, 1
                continue
            args = (
                que,

@@ -622,11 +627,12 @@ class SSH(MultiprocessingStateMixin):
                running[host] = {"thread": routine}
                continue
            ret = {}
            retcode = 0
            try:
                ret = que.get(False)
                ret, retcode = que.get(False)
                if "id" in ret:
                    returned.add(ret["id"])
                    yield {ret["id"]: ret["ret"]}
                    yield {ret["id"]: ret["ret"]}, retcode
            except queue.Empty:
                pass
            for host in running:

@@ -636,10 +642,10 @@ class SSH(MultiprocessingStateMixin):
                # last checked
                try:
                    while True:
                        ret = que.get(False)
                        ret, retcode = que.get(False)
                        if "id" in ret:
                            returned.add(ret["id"])
                            yield {ret["id"]: ret["ret"]}
                            yield {ret["id"]: ret["ret"]}, retcode
                except queue.Empty:
                    pass

@@ -650,7 +656,7 @@ class SSH(MultiprocessingStateMixin):
                    )
                    ret = {"id": host, "ret": error}
                    log.error(error)
                    yield {ret["id"]: ret["ret"]}
                    yield {ret["id"]: ret["ret"]}, 1
                running[host]["thread"].join()
                rets.add(host)
        for host in rets:

@@ -705,8 +711,8 @@ class SSH(MultiprocessingStateMixin):
                jid, job_load
            )

        for ret in self.handle_ssh(mine=mine):
            host = next(iter(ret.keys()))
        for ret, _ in self.handle_ssh(mine=mine):
            host = next(iter(ret))
            self.cache_job(jid, host, ret[host], fun)
            if self.event:
                id_, data = next(iter(ret.items()))
@@ -792,22 +798,16 @@ class SSH(MultiprocessingStateMixin):
        )

        if self.opts.get("verbose"):
            msg = "Executing job with jid {}".format(jid)
            msg = f"Executing job with jid {jid}"
            print(msg)
            print("-" * len(msg) + "\n")
            print("")
        sret = {}
        outputter = self.opts.get("output", "nested")
        final_exit = 0
        for ret in self.handle_ssh():
            host = next(iter(ret.keys()))
            if isinstance(ret[host], dict):
                host_ret = ret[host].get("retcode", 0)
                if host_ret != 0:
                    final_exit = 1
            else:
                # Error on host
                final_exit = 1
        for ret, retcode in self.handle_ssh():
            host = next(iter(ret))
            final_exit = max(final_exit, retcode)

            self.cache_job(jid, host, ret[host], fun)
            ret = self.key_deploy(host, ret)
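With retcodes now flowing out of handle_ssh directly, the final exit status reduces to a running maximum over all hosts. A standalone sketch of that aggregation, with placeholder host data:

    def aggregate_exit(results):
        # results: iterable of (ret_dict, retcode) pairs, as handle_ssh yields.
        final_exit = 0
        for _, retcode in results:
            final_exit = max(final_exit, retcode)
        return final_exit

    assert aggregate_exit([({}, 0), ({}, 1), ({}, 0)]) == 1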
@@ -883,7 +883,7 @@ class Single:
        remote_port_forwards=None,
        winrm=False,
        ssh_options=None,
        **kwargs
        **kwargs,
    ):
        # Get mine setting and mine_functions if defined in kwargs (from roster)
        self.mine = mine

@@ -1017,9 +1017,7 @@ class Single:
        """
        check if the thindir exists on the remote machine
        """
        stdout, stderr, retcode = self.shell.exec_cmd(
            "test -d {}".format(self.thin_dir)
        )
        stdout, stderr, retcode = self.shell.exec_cmd(f"test -d {self.thin_dir}")
        if retcode != 0:
            return False
        return True

@@ -1131,7 +1129,7 @@ class Single:
            self.id,
            fsclient=self.fsclient,
            minion_opts=self.minion_opts,
            **self.target
            **self.target,
        )

        opts_pkg = pre_wrapper["test.opts_pkg"]()  # pylint: disable=E1102

@@ -1217,7 +1215,7 @@ class Single:
            self.id,
            fsclient=self.fsclient,
            minion_opts=self.minion_opts,
            **self.target
            **self.target,
        )
        wrapper.fsclient.opts["cachedir"] = opts["cachedir"]
        self.wfuncs = salt.loader.ssh_wrapper(opts, wrapper, self.context)

@@ -1265,7 +1263,7 @@ class Single:
            else:
                result = self.wfuncs[self.fun](*self.args, **self.kwargs)
        except TypeError as exc:
            result = "TypeError encountered executing {}: {}".format(self.fun, exc)
            result = f"TypeError encountered executing {self.fun}: {exc}"
            log.error(result, exc_info_on_loglevel=logging.DEBUG)
            retcode = 1
        except Exception as exc:  # pylint: disable=broad-except

@@ -1274,6 +1272,10 @@ class Single:
            )
            log.error(result, exc_info_on_loglevel=logging.DEBUG)
            retcode = 1

        # Ensure retcode from wrappers is respected, especially state render exceptions
        retcode = max(retcode, self.context.get("retcode", 0))

        # Mimic the json data-structure that "salt-call --local" will
        # emit (as seen in ssh_py_shim.py)
        if isinstance(result, dict) and "local" in result:

@@ -1288,7 +1290,7 @@ class Single:
        """
        if self.target.get("sudo"):
            sudo = (
                "sudo -p '{}'".format(salt.client.ssh.shell.SUDO_PROMPT)
                f"sudo -p '{salt.client.ssh.shell.SUDO_PROMPT}'"
                if self.target.get("passwd")
                else "sudo"
            )
@@ -1360,20 +1362,18 @@ ARGS = {arguments}\n'''.format(
            script_args = shlex.split(str(script_args))
        args = " {}".format(" ".join([shlex.quote(str(el)) for el in script_args]))
        if extension == "ps1":
            ret = self.shell.exec_cmd('"powershell {}"'.format(script))
            ret = self.shell.exec_cmd(f'"powershell {script}"')
        else:
            if not self.winrm:
                ret = self.shell.exec_cmd(
                    "/bin/sh '{}{}'{}".format(pre_dir, script, args)
                )
                ret = self.shell.exec_cmd(f"/bin/sh '{pre_dir}{script}'{args}")
            else:
                ret = saltwinshell.call_python(self, script)

        # Remove file from target system
        if not self.winrm:
            self.shell.exec_cmd("rm '{}{}'".format(pre_dir, script))
            self.shell.exec_cmd(f"rm '{pre_dir}{script}'")
        else:
            self.shell.exec_cmd("del {}".format(script))
            self.shell.exec_cmd(f"del {script}")

        return ret
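The args assembly above relies on shlex.quote to keep user-supplied script arguments from being interpreted by the remote /bin/sh. A quick standalone illustration with made-up arguments:

    import shlex

    script_args = ["plain", "two words", "$(rm -rf /)"]
    args = " " + " ".join(shlex.quote(str(el)) for el in script_args)
    # -> " plain 'two words' '$(rm -rf /)'"; metacharacters stay literal.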
@@ -1461,7 +1461,7 @@ ARGS = {arguments}\n'''.format(
            while re.search(RSTR_RE, stderr):
                stderr = re.split(RSTR_RE, stderr, 1)[1].strip()
        else:
            return "ERROR: {}".format(error), stderr, retcode
            return f"ERROR: {error}", stderr, retcode

        # FIXME: this discards output from ssh_shim if the shim succeeds. It should
        # always save the shim output regardless of shim success or failure.

@@ -1521,7 +1521,7 @@ ARGS = {arguments}\n'''.format(
                # If RSTR is not seen in both stdout and stderr then there
                # was a thin deployment problem.
                return (
                    "ERROR: Failure deploying ext_mods: {}".format(stdout),
                    f"ERROR: Failure deploying ext_mods: {stdout}",
                    stderr,
                    retcode,
                )

@@ -1689,7 +1689,7 @@ def mod_data(fsclient):
        files = fsclient.file_list(env)
        for ref in sync_refs:
            mods_data = {}
            pref = "_{}".format(ref)
            pref = f"_{ref}"
            for fn_ in sorted(files):
                if fn_.startswith(pref):
                    if fn_.endswith((".py", ".so", ".pyx")):

@@ -1711,9 +1711,7 @@ def mod_data(fsclient):
        ver_base = salt.utils.stringutils.to_bytes(ver_base)

    ver = hashlib.sha1(ver_base).hexdigest()
    ext_tar_path = os.path.join(
        fsclient.opts["cachedir"], "ext_mods.{}.tgz".format(ver)
    )
    ext_tar_path = os.path.join(fsclient.opts["cachedir"], f"ext_mods.{ver}.tgz")
    mods = {"version": ver, "file": ext_tar_path}
    if os.path.isfile(ext_tar_path):
        return mods

@@ -1762,7 +1760,7 @@ def _convert_args(args):
            for key in list(arg.keys()):
                if key == "__kwarg__":
                    continue
                converted.append("{}={}".format(key, arg[key]))
                converted.append(f"{key}={arg[key]}")
        else:
            converted.append(arg)
    return converted
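mod_data keys the extension-module tarball by a SHA-1 of the collected module versions, so an unchanged module set reuses the cached archive. A sketch of that content-addressed naming; the cachedir path and version string are illustrative:

    import hashlib
    import os

    ver_base = b"module-a:1.0|module-b:2.3"  # stand-in for the concatenated versions
    ver = hashlib.sha1(ver_base).hexdigest()
    ext_tar_path = os.path.join("/var/cache/salt/master", f"ext_mods.{ver}.tgz")
    # Same inputs -> same path -> the cached tarball is reused.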
@@ -8,6 +8,7 @@ import time

import salt.client.ssh.shell
import salt.client.ssh.state
import salt.defaults.exitcodes
import salt.loader
import salt.minion
import salt.roster

@@ -54,7 +55,7 @@ def _ssh_state(chunks, st_kwargs, kwargs, test=False):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )
    single.shell.send(trans_tar, "{}/salt_state.tgz".format(__opts__["thin_dir"]))
    stdout, stderr, _ = single.cmd_block()
@@ -84,14 +85,14 @@ def _set_retcode(ret, highstate=None):
    """

    # Set default retcode to 0
    __context__["retcode"] = 0
    __context__["retcode"] = salt.defaults.exitcodes.EX_OK

    if isinstance(ret, list):
        __context__["retcode"] = 1
        __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
        return
    if not salt.utils.state.check_result(ret, highstate=highstate):

        __context__["retcode"] = 2
        __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_FAILURE


def _check_pillar(kwargs, pillar=None):
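The numeric codes themselves are unchanged by this hunk; they are only referenced by name now. Based on the replaced literals, the mapping appears to be (values inferred from the before/after lines, see salt.defaults.exitcodes for the authoritative definitions):

    # Inferred from the literals replaced above.
    EX_OK = 0                    # clean run
    EX_STATE_COMPILER_ERROR = 1  # list return means a compile/render problem
    EX_STATE_FAILURE = 2         # a state ran and failed its result check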
@@ -182,6 +183,11 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.push_active()
        mods = _parse_mods(mods)
        high_data, errors = st_.render_highstate(

@@ -198,12 +204,14 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs):
        errors += ext_errors
        errors += st_.state.verify_high(high_data)
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        high_data, req_in_errors = st_.state.requisite_in(high_data)
        errors += req_in_errors
        high_data = st_.state.apply_exclude(high_data)
        # Verify that the high data is structurally sound
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        # Compile and verify the raw chunks
        chunks = st_.state.compile_high_data(high_data)

@@ -236,7 +244,7 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )
    single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"]))
    stdout, stderr, _ = single.cmd_block()

@@ -316,7 +324,7 @@ def _check_queue(queue, kwargs):
    else:
        conflict = running(concurrent=kwargs.get("concurrent", False))
        if conflict:
            __context__["retcode"] = 1
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return conflict

@@ -384,7 +392,7 @@ def low(data, **kwargs):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )
    single.shell.send(trans_tar, "{}/salt_state.tgz".format(__opts__["thin_dir"]))
    stdout, stderr, _ = single.cmd_block()

@@ -474,7 +482,7 @@ def high(data, **kwargs):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )
    single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"]))
    stdout, stderr, _ = single.cmd_block()

@@ -550,7 +558,7 @@ def request(mods=None, **kwargs):
    try:
        if salt.utils.platform.is_windows():
            # Make sure cache file isn't read-only
            __salt__["cmd.run"]('attrib -R "{}"'.format(notify_path))
            __salt__["cmd.run"](f'attrib -R "{notify_path}"')
        with salt.utils.files.fopen(notify_path, "w+b") as fp_:
            salt.payload.dump(req, fp_)
    except OSError:

@@ -614,7 +622,7 @@ def clear_request(name=None):
    try:
        if salt.utils.platform.is_windows():
            # Make sure cache file isn't read-only
            __salt__["cmd.run"]('attrib -R "{}"'.format(notify_path))
            __salt__["cmd.run"](f'attrib -R "{notify_path}"')
        with salt.utils.files.fopen(notify_path, "w+b") as fp_:
            salt.payload.dump(req, fp_)
    except OSError:

@@ -681,6 +689,11 @@ def highstate(test=None, **kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.push_active()
        chunks = st_.compile_low_chunks(context=__context__.value())
        file_refs = salt.client.ssh.state.lowstate_file_refs(

@@ -692,7 +705,7 @@ def highstate(test=None, **kwargs):
        # Check for errors
        for chunk in chunks:
            if not isinstance(chunk, dict):
                __context__["retcode"] = 1
                __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
                return chunks

        roster = salt.roster.Roster(opts, opts.get("roster", "flat"))

@@ -717,7 +730,7 @@ def highstate(test=None, **kwargs):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )
    single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"]))
    stdout, stderr, _ = single.cmd_block()

@@ -766,9 +779,19 @@ def top(topfn, test=None, **kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.opts["state_top"] = os.path.join("salt://", topfn)
        st_.push_active()
        chunks = st_.compile_low_chunks(context=__context__.value())
        # Check for errors
        for chunk in chunks:
            if not isinstance(chunk, dict):
                __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
                return chunks
        file_refs = salt.client.ssh.state.lowstate_file_refs(
            chunks,
            _merge_extra_filerefs(

@@ -798,7 +821,7 @@ def top(topfn, test=None, **kwargs):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )
    single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"]))
    stdout, stderr, _ = single.cmd_block()

@@ -839,8 +862,17 @@ def show_highstate(**kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.push_active()
        chunks = st_.compile_highstate(context=__context__.value())
        # Check for errors
        if not isinstance(chunks, dict):
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return chunks
        _cleanup_slsmod_high_data(chunks)
        return chunks

@@ -864,6 +896,11 @@ def show_lowstate(**kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.push_active()
        chunks = st_.compile_low_chunks(context=__context__.value())
        _cleanup_slsmod_low_data(chunks)

@@ -925,7 +962,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
    ) as st_:

        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = 5
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += __pillar__["_errors"]
            return err

@@ -943,7 +980,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
        # but it is required to get the unit tests to pass.
        errors.extend(req_in_errors)
        if errors:
            __context__["retcode"] = 1
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        chunks = st_.state.compile_high_data(high_)
        chunk = [x for x in chunks if x.get("__id__", "") == id_]

@@ -988,6 +1025,11 @@ def show_sls(mods, saltenv="base", test=None, **kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.push_active()
        mods = _parse_mods(mods)
        high_data, errors = st_.render_highstate(

@@ -997,12 +1039,14 @@ def show_sls(mods, saltenv="base", test=None, **kwargs):
        errors += ext_errors
        errors += st_.state.verify_high(high_data)
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        high_data, req_in_errors = st_.state.requisite_in(high_data)
        errors += req_in_errors
        high_data = st_.state.apply_exclude(high_data)
        # Verify that the high data is structurally sound
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        _cleanup_slsmod_high_data(high_data)
        return high_data

@@ -1036,6 +1080,11 @@ def show_low_sls(mods, saltenv="base", test=None, **kwargs):
        __context__["fileclient"],
        context=__context__.value(),
    ) as st_:
        if not _check_pillar(kwargs, st_.opts["pillar"]):
            __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE
            err = ["Pillar failed to render with the following messages:"]
            err += st_.opts["pillar"]["_errors"]
            return err
        st_.push_active()
        mods = _parse_mods(mods)
        high_data, errors = st_.render_highstate(

@@ -1045,12 +1094,14 @@ def show_low_sls(mods, saltenv="base", test=None, **kwargs):
        errors += ext_errors
        errors += st_.state.verify_high(high_data)
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        high_data, req_in_errors = st_.state.requisite_in(high_data)
        errors += req_in_errors
        high_data = st_.state.apply_exclude(high_data)
        # Verify that the high data is structurally sound
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        ret = st_.state.compile_high_data(high_data)
        _cleanup_slsmod_low_data(ret)

@@ -1080,6 +1131,7 @@ def show_top(**kwargs):
        errors = []
        errors += st_.verify_tops(top_data)
        if errors:
            __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
            return errors
        matches = st_.top_matches(top_data)
        return matches

@@ -1110,7 +1162,7 @@ def single(fun, name, test=None, **kwargs):
    # state.fun -> [state, fun]
    comps = fun.split(".")
    if len(comps) < 2:
        __context__["retcode"] = 1
        __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
        return "Invalid function passed"

    # Create the low chunk, using kwargs as a base

@@ -1133,7 +1185,7 @@ def single(fun, name, test=None, **kwargs):
    # Verify the low chunk
    err = st_.verify_data(kwargs)
    if err:
        __context__["retcode"] = 1
        __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
        return err

    # Must be a list of low-chunks

@@ -1175,7 +1227,7 @@ def single(fun, name, test=None, **kwargs):
        cmd,
        fsclient=__context__["fileclient"],
        minion_opts=__salt__.minion_opts,
        **st_kwargs
        **st_kwargs,
    )

    # Copy the tar down
@@ -2083,7 +2083,7 @@ def del_repo(repo, **kwargs):

                    s_comps = set(source.comps)
                    r_comps = set(repo_comps)
                    if s_comps.intersection(r_comps):
                    if s_comps.intersection(r_comps) or (not s_comps and not r_comps):
                        deleted_from[source.file] = 0
                        source.comps = list(s_comps.difference(r_comps))
                        if not source.comps:

@@ -2104,7 +2104,7 @@ def del_repo(repo, **kwargs):

                    s_comps = set(source.comps)
                    r_comps = set(repo_comps)
                    if s_comps.intersection(r_comps):
                    if s_comps.intersection(r_comps) or (not s_comps and not r_comps):
                        deleted_from[source.file] = 0
                        source.comps = list(s_comps.difference(r_comps))
                        if not source.comps:
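The widened condition also matches sources that declare no components at all (e.g. flat repos like "deb http://dist.list stable/all/" from the test fixture further down), where both sets are empty and the old intersection test could never be true. A small truth sketch of the new predicate:

    def matches(s_comps, r_comps):
        # New condition from the hunks above, in standalone form.
        return bool(s_comps & r_comps) or (not s_comps and not r_comps)

    assert matches({"main"}, {"main", "universe"})  # shared component
    assert matches(set(), set())                    # component-less repo now matches
    assert not matches({"main"}, set())             # still no spurious match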
@@ -203,11 +203,11 @@ class Runner(RunnerClient):
            arg = self.opts.get("fun", None)
            docs = super().get_docs(arg)
            for fun in sorted(docs):
                display_output("{}:".format(fun), "text", self.opts)
                display_output(f"{fun}:", "text", self.opts)
                print(docs[fun])

    # TODO: move to mixin whenever we want a salt-wheel cli
    def run(self):
    def run(self, full_return=False):
        """
        Execute the runner sequence
        """

@@ -306,19 +306,20 @@ class Runner(RunnerClient):
                    tag=async_pub["tag"],
                    jid=async_pub["jid"],
                    daemonize=False,
                    full_return=full_return,
                )
            except salt.exceptions.SaltException as exc:
                with salt.utils.event.get_event("master", opts=self.opts) as evt:
                    evt.fire_event(
                        {
                            "success": False,
                            "return": "{}".format(exc),
                            "return": f"{exc}",
                            "retcode": 254,
                            "fun": self.opts["fun"],
                            "fun_args": fun_args,
                            "jid": self.jid,
                        },
                        tag="salt/run/{}/ret".format(self.jid),
                        tag=f"salt/run/{self.jid}/ret",
                    )
                # Attempt to grab documentation
                if "fun" in low:

@@ -329,7 +330,7 @@ class Runner(RunnerClient):
                # If we didn't get docs returned then
                # return the `not available` message.
                if not ret:
                    ret = "{}".format(exc)
                    ret = f"{exc}"
                if not self.opts.get("quiet", False):
                    display_output(ret, "nested", self.opts)
            else:
@@ -137,7 +137,7 @@ def _changes(
                if _gid not in dupe_groups:
                    dupe_groups[_gid] = []
                dupe_groups[_gid].append(lusr["groups"][idx])
    if not remove_groups:
    if not remove_groups or groups is None and not optional_groups:
        wanted_groups = sorted(set(wanted_groups + lusr["groups"]))
    if uid and lusr["uid"] != uid:
        change["uid"] = uid
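Note the operator precedence in the new condition: `and` binds tighter than `or`, so it reads as `not remove_groups or (groups is None and not optional_groups)`, meaning existing groups are also preserved when the caller supplied no group arguments at all. A sketch of the predicate, with argument names mirroring the hunk:

    def keep_existing_groups(remove_groups, groups, optional_groups):
        # Equivalent, fully parenthesized form of the condition above.
        return (not remove_groups) or (groups is None and not optional_groups)

    assert keep_existing_groups(True, None, [])        # no groups requested at all
    assert keep_existing_groups(False, ["wheel"], [])  # remove_groups disabled
    assert not keep_existing_groups(True, ["wheel"], [])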
85
tests/pytests/functional/cli/test_salt_run_.py
Normal file
@@ -0,0 +1,85 @@
import logging
import os

import salt.version

log = logging.getLogger(__name__)


def test_salt_run_exception_exit(salt_run_cli):
    """
    test that the exitcode is 1 when an exception is
    thrown in a salt runner
    """
    ret = salt_run_cli.run(
        "error.error", "name='Exception'", "message='This is an error.'"
    )
    assert ret.returncode == 1


def test_salt_run_non_exception_exit(salt_run_cli):
    """
    Test standard exitcode and output when runner works.
    """
    ret = salt_run_cli.run("test.stdout_print")
    assert ret.returncode == 0
    assert ret.stdout == 'foo\n"bar"\n'


def test_versions_report(salt_run_cli):
    """
    test salt-run --versions-report
    """
    expected = salt.version.versions_information()
    # sanitize expected of unnecessary whitespace
    for _, section in expected.items():
        for key in section:
            if isinstance(section[key], str):
                section[key] = section[key].strip()

    ret = salt_run_cli.run("--versions-report")
    assert ret.returncode == 0
    assert ret.stdout
    ret_lines = ret.stdout.split("\n")

    assert ret_lines
    # sanitize lines
    ret_lines = [line.strip() for line in ret_lines]

    for header in expected:
        assert f"{header}:" in ret_lines

    ret_dict = {}
    expected_keys = set()
    for line in ret_lines:
        if not line:
            continue
        if line.endswith(":"):
            assert not expected_keys
            current_header = line.rstrip(":")
            assert current_header in expected
            ret_dict[current_header] = {}
            expected_keys = set(expected[current_header].keys())
        else:
            key, *value_list = line.split(":", 1)
            assert value_list
            assert len(value_list) == 1
            value = value_list[0].strip()
            if value == "Not Installed":
                value = None
            ret_dict[current_header][key] = value
            assert key in expected_keys
            expected_keys.remove(key)
    assert not expected_keys
    if os.environ.get("ONEDIR_TESTRUN", "0") == "0":
        # Stop any more testing
        return

    assert "relenv" in ret_dict["Dependency Versions"]
    assert "Salt Extensions" in ret_dict


def test_salt_run_version(salt_run_cli):
    expected = salt.version.__version__
    ret = salt_run_cli.run("--version")
    assert f"cli_salt_run.py {expected}\n" == ret.stdout
@@ -35,7 +35,7 @@ class Key:
        keydir = pathlib.Path("/etc", "apt", "keyrings")
        if not keydir.is_dir():
            keydir.mkdir()
        aptpkg.add_repo_key("salt://{}".format(self.keyname), aptkey=self.aptkey)
        aptpkg.add_repo_key(f"salt://{self.keyname}", aptkey=self.aptkey)

    def del_key(self):
        aptpkg.del_repo_key(keyid="0E08A149DE57BFBE", aptkey=self.aptkey)

@@ -98,6 +98,38 @@ def revert_repo_file(tmp_path):
        aptpkg.refresh_db()


@pytest.fixture
def build_repo_file():
    source_path = "/etc/apt/sources.list.d/source_test_list.list"
    try:
        test_repos = [
            "deb [signed-by=/etc/apt/keyrings/salt-archive-keyring-2023.gpg arch=amd64] https://repo.saltproject.io/salt/py3/ubuntu/22.04/amd64/latest jammy main",
            "deb http://dist.list stable/all/",
        ]
        with salt.utils.files.fopen(source_path, "w+") as fp:
            fp.write("\n".join(test_repos))
        yield source_path
    finally:
        if os.path.exists(source_path):
            os.remove(source_path)


def get_repos_from_file(source_path):
    """
    Get list of repos from repo in source_path
    """
    test_repos = []
    try:
        with salt.utils.files.fopen(source_path) as fp:
            for line in fp:
                test_repos.append(line.strip())
    except FileNotFoundError as error:
        pytest.skip(f"Missing {error.filename}")
    if not test_repos:
        pytest.skip("Did not detect an APT repo")
    return test_repos


def get_current_repo(multiple_comps=False):
    """
    Get a repo currently in sources.list

@@ -121,7 +153,7 @@ def get_current_repo(multiple_comps=False):
            else:
                break
    except FileNotFoundError as error:
        pytest.skip("Missing {}".format(error.filename))
        pytest.skip(f"Missing {error.filename}")
    if not test_repo:
        pytest.skip("Did not detect an APT repo")
    return test_repo, comps

@@ -195,18 +227,20 @@ def test_get_repos_doesnot_exist():


@pytest.mark.destructive_test
def test_del_repo(revert_repo_file):
@pytest.mark.skip_if_not_root
def test_del_repo(build_repo_file):
    """
    Test aptpkg.del_repo when passing repo
    that exists. And checking correct error
    is returned when it no longer exists.
    """
    test_repo, comps = get_current_repo()
    ret = aptpkg.del_repo(repo=test_repo)
    assert "Repo '{}' has been removed".format(test_repo)
    with pytest.raises(salt.exceptions.CommandExecutionError) as exc:
    test_repos = get_repos_from_file(build_repo_file)
    for test_repo in test_repos:
        ret = aptpkg.del_repo(repo=test_repo)
        assert "Repo {} doesn't exist".format(test_repo) in exc.value.message
        assert f"Repo '{test_repo}' has been removed"
        with pytest.raises(salt.exceptions.CommandExecutionError) as exc:
            ret = aptpkg.del_repo(repo=test_repo)
        assert f"Repo {test_repo} doesn't exist" in exc.value.message

@@ -242,6 +276,7 @@ def test__expand_repo_def(grains):


@pytest.mark.destructive_test
@pytest.mark.skip_if_not_root
def test_mod_repo(revert_repo_file):
    """
    Test aptpkg.mod_repo when the repo exists.

@@ -252,10 +287,11 @@ def test_mod_repo(revert_repo_file):
    ret = aptpkg.mod_repo(repo=test_repo, comments=msg)
    assert sorted(ret[list(ret.keys())[0]]["comps"]) == sorted(comps)
    ret = file.grep("/etc/apt/sources.list", msg)
    assert "#{}".format(msg) in ret["stdout"]
    assert f"#{msg}" in ret["stdout"]


@pytest.mark.destructive_test
@pytest.mark.skip_if_not_root
def test_mod_repo_no_file(tmp_path, revert_repo_file):
    """
    Test aptpkg.mod_repo when the file does not exist.

@@ -284,6 +320,7 @@ def add_key(request, get_key_file):
@pytest.mark.parametrize("get_key_file", KEY_FILES, indirect=True)
@pytest.mark.parametrize("add_key", [False, True], indirect=True)
@pytest.mark.destructive_test
@pytest.mark.skip_if_not_root
def test_get_repo_keys(get_key_file, add_key):
    """
    Test aptpkg.get_repo_keys when aptkey is False and True

@@ -297,6 +334,7 @@ def test_get_repo_keys(get_key_file, add_key):

@pytest.mark.parametrize("key", [False, True])
@pytest.mark.destructive_test
@pytest.mark.skip_if_not_root
def test_get_repo_keys_keydir_not_exist(key):
    """
    Test aptpkg.get_repo_keys when aptkey is False and True

@@ -319,7 +357,7 @@ def test_add_del_repo_key(get_key_file, aptkey):
    and using both binary and armored gpg keys
    """
    try:
        assert aptpkg.add_repo_key("salt://{}".format(get_key_file), aptkey=aptkey)
        assert aptpkg.add_repo_key(f"salt://{get_key_file}", aptkey=aptkey)
        keyfile = pathlib.Path("/etc", "apt", "keyrings", get_key_file)
        if not aptkey:
            assert keyfile.is_file()
@@ -117,7 +117,6 @@ def test_user_present_when_home_dir_does_not_18843(states, existing_account):
    ret = states.user.present(
        name=existing_account.username,
        home=existing_account.info.home,
        remove_groups=False,
    )
    assert ret.result is True
    assert pathlib.Path(existing_account.info.home).is_dir()

@@ -228,7 +227,6 @@ def test_user_present_unicode(states, username, subtests):
            roomnumber="①②③",
            workphone="١٢٣٤",
            homephone="६७८",
            remove_groups=False,
        )
        assert ret.result is True

@@ -350,7 +348,7 @@ def test_user_present_change_gid_but_keep_group(

@pytest.mark.skip_unless_on_windows
def test_user_present_existing(states, username):
    win_profile = "C:\\User\\{}".format(username)
    win_profile = f"C:\\User\\{username}"
    win_logonscript = "C:\\logon.vbs"
    win_description = "Test User Account"
    ret = states.user.present(

@@ -362,7 +360,7 @@ def test_user_present_existing(states, username):
    )
    assert ret.result is True

    win_profile = "C:\\Users\\{}".format(username)
    win_profile = f"C:\\Users\\{username}"
    win_description = "Temporary Account"
    ret = states.user.present(
        name=username,

@@ -429,3 +427,75 @@ def test_user_present_change_optional_groups(
    user_info = modules.user.info(username)
    assert user_info
    assert user_info["groups"] == [group_1.name]


@pytest.mark.skip_unless_on_linux(reason="underlying functionality only runs on Linux")
def test_user_present_no_groups(modules, states, username):
    """
    test user.present when the groups arg is not
    included but the group is created in another
    state. Re-run the states to ensure there are
    no changes and it is idempotent.
    """
    groups = ["testgroup1", "testgroup2"]
    try:
        ret = states.group.present(name=username, gid=61121)
        assert ret.result is True

        ret = states.user.present(
            name=username,
            uid=61121,
            gid=61121,
        )
        assert ret.result is True
        assert ret.changes["groups"] == [username]
        assert ret.changes["name"] == username

        ret = states.group.present(
            name=groups[0],
            members=[username],
        )
        assert ret.changes["members"] == [username]

        ret = states.group.present(
            name=groups[1],
            members=[username],
        )
        assert ret.changes["members"] == [username]

        user_info = modules.user.info(username)
        assert user_info
        assert user_info["groups"] == [username, groups[0], groups[1]]

        # run again, expecting no changes
        ret = states.group.present(name=username)
        assert ret.result is True
        assert ret.changes == {}

        ret = states.user.present(
            name=username,
        )
        assert ret.result is True
        assert ret.changes == {}

        ret = states.group.present(
            name=groups[0],
            members=[username],
        )
        assert ret.result is True
        assert ret.changes == {}

        ret = states.group.present(
            name=groups[1],
            members=[username],
        )
        assert ret.result is True
        assert ret.changes == {}

        user_info = modules.user.info(username)
        assert user_info
        assert user_info["groups"] == [username, groups[0], groups[1]]
    finally:
        for group in groups:
            ret = states.group.absent(name=group)
            assert ret.result is True
@@ -119,21 +119,21 @@ def test_reactor_is_leader(
    When leader is set to false reactor should timeout/not do anything.
    """
    ret = salt_run_cli.run("reactor.is_leader")
    assert ret.returncode == 0
    assert ret.returncode == 1
    assert (
        "salt.exceptions.CommandExecutionError: Reactor system is not running."
        in ret.stdout
    )

    ret = salt_run_cli.run("reactor.set_leader", value=True)
    assert ret.returncode == 0
    assert ret.returncode == 1
    assert (
        "salt.exceptions.CommandExecutionError: Reactor system is not running."
        in ret.stdout
    )

    ret = salt_run_cli.run("reactor.is_leader")
    assert ret.returncode == 0
    assert ret.returncode == 1
    assert (
        "salt.exceptions.CommandExecutionError: Reactor system is not running."
        in ret.stdout

@@ -220,7 +220,7 @@ def test_reactor_is_leader(

    # Let's just confirm the engine is not running once again (because the config file is deleted by now)
    ret = salt_run_cli.run("reactor.is_leader")
    assert ret.returncode == 0
    assert ret.returncode == 1
    assert (
        "salt.exceptions.CommandExecutionError: Reactor system is not running."
        in ret.stdout
@@ -9,6 +9,7 @@ import pytest

import salt.utils.files
import salt.utils.yaml
from salt.defaults.exitcodes import EX_AGGREGATE

pytestmark = [
    pytest.mark.slow_test,

@@ -74,15 +75,13 @@ def test_set_path(salt_ssh_cli, tmp_path, salt_ssh_roster_file):
        roster_data = salt.utils.yaml.safe_load(rfh)
    roster_data["localhost"].update(
        {
            "set_path": "$PATH:/usr/local/bin/:{}".format(path),
            "set_path": f"$PATH:/usr/local/bin/:{path}",
        }
    )
    with salt.utils.files.fopen(roster_file, "w") as wfh:
        salt.utils.yaml.safe_dump(roster_data, wfh)

    ret = salt_ssh_cli.run(
        "--roster-file={}".format(roster_file), "environ.get", "PATH"
    )
    ret = salt_ssh_cli.run(f"--roster-file={roster_file}", "environ.get", "PATH")
    assert ret.returncode == 0
    assert path in ret.data

@@ -97,6 +96,29 @@ def test_tty(salt_ssh_cli, tmp_path, salt_ssh_roster_file):
    roster_data["localhost"].update({"tty": True})
    with salt.utils.files.fopen(roster_file, "w") as wfh:
        salt.utils.yaml.safe_dump(roster_data, wfh)
    ret = salt_ssh_cli.run("--roster-file={}".format(roster_file), "test.ping")
    ret = salt_ssh_cli.run(f"--roster-file={roster_file}", "test.ping")
    assert ret.returncode == 0
    assert ret.data is True


def test_retcode_exe_run_fail(salt_ssh_cli):
    """
    Verify salt-ssh passes through the retcode it receives.
    """
    ret = salt_ssh_cli.run("file.touch", "/tmp/non/ex/is/tent")
    assert ret.returncode == EX_AGGREGATE
    assert isinstance(ret.data, dict)
    assert "Error running 'file.touch': No such file or directory" in ret.data["stderr"]
    assert ret.data["retcode"] == 1


def test_retcode_exe_run_exception(salt_ssh_cli):
    """
    Verify salt-ssh passes through the retcode it receives
    when an exception is thrown. (Ref #50727)
    """
    ret = salt_ssh_cli.run("salttest.jinja_error")
    assert ret.returncode == EX_AGGREGATE
    assert isinstance(ret.data, dict)
    assert ret.data["stderr"].endswith("Exception: hehehe")
    assert ret.data["retcode"] == 1
@ -2,6 +2,8 @@ import json
|
|||
|
||||
import pytest
|
||||
|
||||
from salt.defaults.exitcodes import EX_AGGREGATE
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
|
||||
]
|
||||
|
@ -75,6 +77,129 @@ def state_tree_dir(base_env_state_tree_root_dir):
|
|||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def state_tree_render_fail(base_env_state_tree_root_dir):
|
||||
top_file = """
|
||||
base:
|
||||
'localhost':
|
||||
- fail_render
|
||||
'127.0.0.1':
|
||||
- fail_render
|
||||
"""
|
||||
state_file = r"""
|
||||
abc var is not defined {{ abc }}:
|
||||
test.nop
|
||||
"""
|
||||
top_tempfile = pytest.helpers.temp_file(
|
||||
"top.sls", top_file, base_env_state_tree_root_dir
|
||||
)
|
||||
state_tempfile = pytest.helpers.temp_file(
|
||||
"fail_render.sls", state_file, base_env_state_tree_root_dir
|
||||
)
|
||||
with top_tempfile, state_tempfile:
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def state_tree_req_fail(base_env_state_tree_root_dir):
|
||||
top_file = """
|
||||
base:
|
||||
'localhost':
|
||||
- fail_req
|
||||
'127.0.0.1':
|
||||
- fail_req
|
||||
"""
|
||||
state_file = """
|
||||
This has an invalid requisite:
|
||||
test.nop:
|
||||
- name: foo
|
||||
- require_in:
|
||||
- file.managed: invalid_requisite
|
||||
"""
|
||||
top_tempfile = pytest.helpers.temp_file(
|
||||
"top.sls", top_file, base_env_state_tree_root_dir
|
||||
)
|
||||
state_tempfile = pytest.helpers.temp_file(
|
||||
"fail_req.sls", state_file, base_env_state_tree_root_dir
|
||||
)
|
||||
with top_tempfile, state_tempfile:
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def state_tree_structure_fail(base_env_state_tree_root_dir):
|
||||
top_file = """
|
||||
base:
|
||||
'localhost':
|
||||
- fail_structure
|
||||
'127.0.0.1':
|
||||
- fail_structure
|
||||
"""
|
||||
state_file = """
|
||||
extend:
|
||||
Some file state:
|
||||
file:
|
||||
- name: /tmp/bar
|
||||
- contents: bar
|
||||
"""
|
||||
top_tempfile = pytest.helpers.temp_file(
|
||||
"top.sls", top_file, base_env_state_tree_root_dir
|
||||
)
|
||||
state_tempfile = pytest.helpers.temp_file(
|
||||
"fail_structure.sls", state_file, base_env_state_tree_root_dir
|
||||
)
|
||||
with top_tempfile, state_tempfile:
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def state_tree_run_fail(base_env_state_tree_root_dir):
|
||||
top_file = """
|
||||
base:
|
||||
'localhost':
|
||||
- fail_run
|
||||
'127.0.0.1':
|
||||
- fail_run
|
||||
"""
|
||||
state_file = """
|
||||
This file state fails:
|
||||
file.managed:
|
||||
- name: /tmp/non/ex/is/tent
|
||||
- makedirs: false
|
||||
- contents: foo
|
||||
"""
|
||||
top_tempfile = pytest.helpers.temp_file(
|
||||
"top.sls", top_file, base_env_state_tree_root_dir
|
||||
)
|
||||
state_tempfile = pytest.helpers.temp_file(
|
||||
"fail_run.sls", state_file, base_env_state_tree_root_dir
|
||||
)
|
||||
with top_tempfile, state_tempfile:
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def pillar_tree_render_fail(base_env_pillar_tree_root_dir):
|
||||
top_file = """
|
||||
base:
|
||||
'localhost':
|
||||
- fail_render
|
||||
'127.0.0.1':
|
||||
- fail_render
|
||||
"""
|
||||
pillar_file = r"""
|
||||
not_defined: {{ abc }}
|
||||
"""
|
||||
top_tempfile = pytest.helpers.temp_file(
|
||||
"top.sls", top_file, base_env_pillar_tree_root_dir
|
||||
)
|
||||
pillar_tempfile = pytest.helpers.temp_file(
|
||||
"fail_render.sls", pillar_file, base_env_pillar_tree_root_dir
|
||||
)
|
||||
with top_tempfile, pillar_tempfile:
|
||||
yield
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_state_with_import(salt_ssh_cli, state_tree):
|
||||
"""
|
||||
|
@ -220,3 +345,219 @@ def test_state_high(salt_ssh_cli):
|
|||
]["stdout"]
|
||||
== "blah"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
@pytest.mark.usefixtures("state_tree_render_fail")
|
||||
class TestRenderExceptionRetcode:
|
||||
"""
|
||||
Verify salt-ssh fails with a retcode > 0 when a state rendering fails.
|
||||
"""
|
||||
|
||||
def test_retcode_state_sls_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.sls", "fail_render")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_highstate_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.highstate")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_sls_id_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.sls_id", "foo", "fail_render")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_show_sls_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_sls", "fail_render")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_show_low_sls_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_low_sls", "fail_render")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_show_highstate_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_highstate")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_show_lowstate_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_lowstate")
|
||||
# state.show_lowstate exits with 0 for non-ssh as well
|
||||
self._assert_ret(ret, 0)
|
||||
|
||||
def test_retcode_state_top_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.top", "top.sls")
|
||||
self._assert_ret(ret, EX_AGGREGATE)
|
||||
|
||||
def test_retcode_state_single_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.single", "file")
|
||||
assert ret.returncode == EX_AGGREGATE
|
||||
assert isinstance(ret.data, str)
|
||||
assert "single() missing 1 required positional argument" in ret.data
|
||||
|
||||
def _assert_ret(self, ret, retcode):
|
||||
assert ret.returncode == retcode
|
||||
assert isinstance(ret.data, list)
|
||||
assert ret.data
|
||||
assert isinstance(ret.data[0], str)
|
||||
assert ret.data[0].startswith(
|
||||
"Rendering SLS 'base:fail_render' failed: Jinja variable 'abc' is undefined;"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
@pytest.mark.usefixtures("pillar_tree_render_fail")
|
||||
class TestPillarRenderExceptionRetcode:
|
||||
"""
|
||||
Verify salt-ssh fails with a retcode > 0 when a pillar rendering fails.
|
||||
"""
|
||||
|
||||
def test_retcode_state_sls_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.sls", "basic")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_highstate_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.highstate")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_sls_id_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.sls_id", "foo", "basic")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_show_sls_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_sls", "basic")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_show_low_sls_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_low_sls", "basic")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_show_highstate_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_highstate")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_show_lowstate_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.show_lowstate")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def test_retcode_state_top_pillar_render_exception(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.top", "top.sls")
|
||||
self._assert_ret(ret)
|
||||
|
||||
def _assert_ret(self, ret):
|
||||
assert ret.returncode == EX_AGGREGATE
|
||||
assert isinstance(ret.data, list)
|
||||
assert ret.data
|
||||
assert isinstance(ret.data[0], str)
|
||||
assert ret.data[0] == "Pillar failed to render with the following messages:"
|
||||
assert ret.data[1].startswith("Rendering SLS 'fail_render' failed.")
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
@pytest.mark.usefixtures("state_tree_req_fail")
|
||||
class TestStateReqFailRetcode:
|
||||
"""
|
||||
Verify salt-ssh fails with a retcode > 0 when a highstate verification fails.
|
||||
``state.show_highstate`` does not validate this.
|
||||
"""
|
||||
|
||||
def test_retcode_state_sls_invalid_requisite(self, salt_ssh_cli):
|
||||
ret = salt_ssh_cli.run("state.sls", "fail_req")
|
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_highstate_invalid_requisite(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.highstate")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_show_sls_invalid_requisite(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.show_sls", "fail_req")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_show_low_sls_invalid_requisite(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.show_low_sls", "fail_req")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_show_lowstate_invalid_requisite(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.show_lowstate")
        # state.show_lowstate exits with 0 for non-ssh as well
        self._assert_ret(ret, 0)

    def test_retcode_state_top_invalid_requisite(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.top", "top.sls")
        self._assert_ret(ret, EX_AGGREGATE)

    def _assert_ret(self, ret, retcode):
        assert ret.returncode == retcode
        assert isinstance(ret.data, list)
        assert ret.data
        assert isinstance(ret.data[0], str)
        assert ret.data[0].startswith(
            "Invalid requisite in require: file.managed for invalid_requisite"
        )


@pytest.mark.slow_test
@pytest.mark.usefixtures("state_tree_structure_fail")
class TestStateStructureFailRetcode:
    """
    Verify salt-ssh fails with a retcode > 0 when a highstate verification fails.
    This targets another step of the verification.
    ``state.sls_id`` does not seem to support extends.
    ``state.show_highstate`` does not validate this.
    """

    def test_retcode_state_sls_invalid_structure(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.sls", "fail_structure")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_highstate_invalid_structure(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.highstate")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_show_sls_invalid_structure(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.show_sls", "fail_structure")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_show_low_sls_invalid_structure(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.show_low_sls", "fail_structure")
        self._assert_ret(ret, EX_AGGREGATE)

    def test_retcode_state_show_lowstate_invalid_structure(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.show_lowstate")
        # state.show_lowstate exits with 0 for non-ssh as well
        self._assert_ret(ret, 0)

    def test_retcode_state_top_invalid_structure(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.top", "top.sls")
        self._assert_ret(ret, EX_AGGREGATE)

    def _assert_ret(self, ret, retcode):
        assert ret.returncode == retcode
        assert isinstance(ret.data, list)
        assert ret.data
        assert isinstance(ret.data[0], str)
        assert ret.data[0].startswith(
            "Cannot extend ID 'Some file state' in 'base:fail_structure"
        )


@pytest.mark.slow_test
@pytest.mark.usefixtures("state_tree_run_fail")
class TestStateRunFailRetcode:
    """
    Verify salt-ssh passes on a failing retcode from state execution.
    """

    def test_retcode_state_sls_run_fail(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.sls", "fail_run")
        assert ret.returncode == EX_AGGREGATE

    def test_retcode_state_highstate_run_fail(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.highstate")
        assert ret.returncode == EX_AGGREGATE

    def test_retcode_state_sls_id_render_exception(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.sls_id", "This file state fails", "fail_run")
        assert ret.returncode == EX_AGGREGATE

    def test_retcode_state_top_run_fail(self, salt_ssh_cli):
        ret = salt_ssh_cli.run("state.top", "top.sls")
        assert ret.returncode == EX_AGGREGATE
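The classes above rely on fixtures (``state_tree_structure_fail``, ``state_tree_run_fail``) that sit outside this hunk. As a rough sketch of the kind of state tree ``state_tree_run_fail`` would have to provide, assuming a plain pytest fixture that writes SLS files into a temporary directory (names and paths here are illustrative, not from the commit):

import pytest


@pytest.fixture
def state_tree_run_fail(tmp_path):
    # Illustrative sketch only: a top file plus one SLS whose single state
    # fails at runtime, so state.sls/state.highstate should exit non-zero.
    (tmp_path / "top.sls").write_text(
        "base:\n  'localhost':\n    - fail_run\n"
    )
    (tmp_path / "fail_run.sls").write_text(
        "This file state fails:\n"
        "  file.managed:\n"
        "    - name: /tmp/does/not/exist/file\n"  # parent dir is missing
        "    - makedirs: false\n"                 # ...and must not be created
    )
    return tmp_path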
@@ -30,13 +30,16 @@ def test_password_failure(temp_salt_master, tmp_path):
     opts["arg"] = []
     roster = str(tmp_path / "roster")
     handle_ssh_ret = [
-        {
-            "localhost": {
-                "retcode": 255,
-                "stderr": "Permission denied (publickey).\r\n",
-                "stdout": "",
-            }
-        },
+        (
+            {
+                "localhost": {
+                    "retcode": 255,
+                    "stderr": "Permission denied (publickey).\r\n",
+                    "stdout": "",
+                }
+            },
+            1,
+        )
     ]
     expected = {"localhost": "Permission denied (publickey)"}
     display_output = MagicMock()

@@ -50,4 +53,4 @@ def test_password_failure(temp_salt_master, tmp_path):
     with pytest.raises(SystemExit):
         client.run()
     display_output.assert_called_once_with(expected, "nested", opts)
-    assert ret is handle_ssh_ret[0]
+    assert ret is handle_ssh_ret[0][0]
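Both assertion changes above chase the same behavioral shift: the mocked ``handle_ssh`` results are now ``(payload, retcode)`` tuples instead of bare payload dicts, so callers index one level deeper. A minimal illustration (not part of the diff):

# Illustrative: unpacking the new (payload, retcode) result shape
payload, retcode = handle_ssh_ret[0]
assert retcode == 1
assert payload["localhost"]["retcode"] == 255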
@@ -26,9 +26,7 @@ def test_not_missing_fun_calling_wfuncs(temp_salt_master, tmp_path):
     opts["tgt"] = "localhost"
     opts["arg"] = []
     roster = str(tmp_path / "roster")
-    handle_ssh_ret = [
-        {"localhost": {}},
-    ]
+    handle_ssh_ret = [({"localhost": {}}, 0)]
 
     expected = {"localhost": {}}
     display_output = MagicMock()

@@ -44,7 +42,7 @@ def test_not_missing_fun_calling_wfuncs(temp_salt_master, tmp_path):
     assert "fun" in ret["localhost"]
     client.run()
     display_output.assert_called_once_with(expected, "nested", opts)
-    assert ret is handle_ssh_ret[0]
+    assert ret is handle_ssh_ret[0][0]
     assert len(client.event.fire_event.call_args_list) == 2
     assert "fun" in client.event.fire_event.call_args_list[0][0][0]
     assert "fun" in client.event.fire_event.call_args_list[1][0][0]
@@ -189,6 +189,8 @@ def test_present_uid_gid_change():
         "user.chgid": Mock(),
         "file.group_to_gid": mock_group_to_gid,
         "file.gid_to_group": mock_gid_to_group,
+        "group.info": MagicMock(return_value=after),
+        "user.chgroups": MagicMock(return_value=True),
     }
     with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict(
         user.__salt__, dunder_salt
186
tools/ci.py
@@ -8,14 +8,20 @@ import json
 import logging
 import os
 import pathlib
 import sys
 import time
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
 import yaml
 from ptscripts import Context, command_group
 
 import tools.utils
 
+if sys.version_info < (3, 11):
+    from typing_extensions import NotRequired, TypedDict
+else:
+    from typing import NotRequired, TypedDict  # pylint: disable=no-name-in-module
+
 log = logging.getLogger(__name__)
 
 # Define the command group
@@ -382,6 +388,13 @@ def define_jobs(
         wfh.write(f"jobs={json.dumps(jobs)}\n")
 
 
+class TestRun(TypedDict):
+    type: str
+    skip_code_coverage: bool
+    from_filenames: NotRequired[str]
+    selected_tests: NotRequired[dict[str, bool]]
+
+
 @ci.command(
     name="define-testrun",
     arguments={
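``TypedDict`` only constrains static type checking; at runtime a ``TestRun`` is an ordinary ``dict``, which is what lets the hunks below write it straight into the GitHub outputs file. A small sketch (illustrative, not part of the diff):

import json

# TestRun(...) builds a plain dict, so json.dumps accepts it directly
testrun = TestRun(type="changed", skip_code_coverage=True)
testrun["selected_tests"] = {"core": False, "slow": False, "fast": True, "flaky": False}
print(json.dumps(testrun))
# -> {"type": "changed", "skip_code_coverage": true, "selected_tests": {...}}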
@@ -416,10 +429,31 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
     if TYPE_CHECKING:
         assert github_step_summary is not None
 
+    labels: list[str] = []
+    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
+    if gh_event_path is not None:
+        try:
+            gh_event = json.loads(open(gh_event_path).read())
+        except Exception as exc:
+            ctx.error(
+                f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc
+            )
+            ctx.exit(1)
+
+        labels.extend(
+            label[0] for label in _get_pr_test_labels_from_event_payload(gh_event)
+        )
+
+    skip_code_coverage = True
+    if "test:coverage" in labels:
+        skip_code_coverage = False
+    elif event_name != "pull_request":
+        skip_code_coverage = False
+
     if event_name != "pull_request":
         # In this case, a full test run is in order
         ctx.info("Writing 'testrun' to the github outputs file")
-        testrun = {"type": "full"}
+        testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage)
         with open(github_output, "a", encoding="utf-8") as wfh:
             wfh.write(f"testrun={json.dumps(testrun)}\n")
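The coverage gating introduced above boils down to a small predicate: coverage is only skipped for pull requests that do not carry the ``test:coverage`` label. A hypothetical standalone helper capturing the same logic (not in the diff):

def skip_coverage(labels: list[str], event_name: str) -> bool:
    # Hypothetical helper mirroring the hunk above: coverage runs for any
    # non-PR event and for PRs labeled `test:coverage`; otherwise it is skipped.
    if "test:coverage" in labels:
        return False
    if event_name != "pull_request":
        return False
    return True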
@@ -441,7 +475,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
     ctx.exit(1)
 
     # So, it's a pull request...
-    # Based on which files changed, or other things like PR comments we can
+    # Based on which files changed, or other things like PR labels we can
     # decide what to run, or even if the full test run should be running on the
     # pull request, etc...
     changed_pkg_requirements_files = json.loads(
@@ -456,7 +490,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
             "Full test run chosen because there was a change made "
             "to `cicd/golden-images.json`.\n"
         )
-        testrun = {"type": "full"}
+        testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage)
     elif changed_pkg_requirements_files or changed_test_requirements_files:
         with open(github_step_summary, "a", encoding="utf-8") as wfh:
             wfh.write(
@@ -471,15 +505,20 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
         ):
             wfh.write(f"{path}\n")
         wfh.write("</pre>\n</details>\n")
-        testrun = {"type": "full"}
+        testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage)
+    elif "test:full" in labels:
+        with open(github_step_summary, "a", encoding="utf-8") as wfh:
+            wfh.write("Full test run chosen because the label `test:full` is set.\n")
+        testrun = TestRun(type="full", skip_code_coverage=skip_code_coverage)
     else:
         testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
-        testrun = {
-            "type": "changed",
-            "from-filenames": str(
+        testrun = TestRun(
+            type="changed",
+            skip_code_coverage=skip_code_coverage,
+            from_filenames=str(
                 testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
             ),
-        }
+        )
         ctx.info(f"Writing {testrun_changed_files_path.name} ...")
         selected_changed_files = []
         for fpath in json.loads(changed_files_contents["testrun_files"]):
@@ -499,6 +538,28 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
     if testrun["type"] == "changed":
         with open(github_step_summary, "a", encoding="utf-8") as wfh:
             wfh.write("Partial test run chosen.\n")
+        testrun["selected_tests"] = {
+            "core": False,
+            "slow": False,
+            "fast": True,
+            "flaky": False,
+        }
+        if "test:slow" in labels:
+            with open(github_step_summary, "a", encoding="utf-8") as wfh:
+                wfh.write("Slow tests chosen by `test:slow` label.\n")
+            testrun["selected_tests"]["slow"] = True
+        if "test:core" in labels:
+            with open(github_step_summary, "a", encoding="utf-8") as wfh:
+                wfh.write("Core tests chosen by `test:core` label.\n")
+            testrun["selected_tests"]["core"] = True
+        if "test:no-fast" in labels:
+            with open(github_step_summary, "a", encoding="utf-8") as wfh:
+                wfh.write("Fast tests deselected by `test:no-fast` label.\n")
+            testrun["selected_tests"]["fast"] = False
+        if "test:flaky-jail" in labels:
+            with open(github_step_summary, "a", encoding="utf-8") as wfh:
+                wfh.write("Flaky jailed tests chosen by `test:flaky-jail` label.\n")
+            testrun["selected_tests"]["flaky"] = True
     if selected_changed_files:
         with open(github_step_summary, "a", encoding="utf-8") as wfh:
             wfh.write(
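To make the label handling above concrete: under this logic a pull request carrying only the ``test:slow`` label would end up with the following selection (worked example, not part of the diff):

# labels == ["test:slow"] on a "changed" test run yields:
testrun["selected_tests"] = {
    "core": False,   # no test:core label
    "slow": True,    # flipped on by test:slow
    "fast": True,    # the default; no test:no-fast label present
    "flaky": False,  # no test:flaky-jail label
}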
@@ -730,3 +791,110 @@ def get_release_changelog_target(ctx: Context, event_name: str):
     with open(github_output, "a", encoding="utf-8") as wfh:
         wfh.write(f"release-changelog-target={release_changelog_target}\n")
     ctx.exit(0)
+
+
+@ci.command(
+    name="get-pr-test-labels",
+    arguments={
+        "pr": {
+            "help": "Pull request number",
+        },
+        "repository": {
+            "help": "Github repository.",
+        },
+    },
+)
+def get_pr_test_labels(
+    ctx: Context, repository: str = "saltstack/salt", pr: int = None
+):
+    """
+    Set the pull-request labels.
+    """
+    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
+    if gh_event_path is None:
+        labels = _get_pr_test_labels_from_api(ctx, repository, pr=pr)
+    else:
+        if TYPE_CHECKING:
+            assert gh_event_path is not None
+
+        try:
+            gh_event = json.loads(open(gh_event_path).read())
+        except Exception as exc:
+            ctx.error(
+                f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc
+            )
+            ctx.exit(1)
+
+        if "pull_request" not in gh_event:
+            ctx.warning("The 'pull_request' key was not found on the event payload.")
+            ctx.exit(1)
+
+        pr = gh_event["pull_request"]["number"]
+        labels = _get_pr_test_labels_from_event_payload(gh_event)
+
+    if labels:
+        ctx.info(f"Test labels for pull-request #{pr} on {repository}:")
+        for name, description in labels:
+            ctx.info(f" * [yellow]{name}[/yellow]: {description}")
+    else:
+        ctx.info(f"No test labels for pull-request #{pr} on {repository}")
+
+    github_output = os.environ.get("GITHUB_OUTPUT")
+    if github_output is None:
+        ctx.exit(0)
+
+    if TYPE_CHECKING:
+        assert github_output is not None
+
+    ctx.info("Writing 'labels' to the github outputs file")
+    with open(github_output, "a", encoding="utf-8") as wfh:
+        wfh.write(f"labels={json.dumps([label[0] for label in labels])}\n")
+    ctx.exit(0)
+
+
+def _get_pr_test_labels_from_api(
+    ctx: Context, repository: str = "saltstack/salt", pr: int = None
+) -> list[tuple[str, str]]:
+    """
+    Set the pull-request labels.
+    """
+    if pr is None:
+        ctx.error(
+            "Could not find the 'GITHUB_EVENT_PATH' variable and the "
+            "--pr flag was not passed. Unable to detect pull-request number."
+        )
+        ctx.exit(1)
+    with ctx.web as web:
+        headers = {
+            "Accept": "application/vnd.github+json",
+        }
+        if "GITHUB_TOKEN" in os.environ:
+            headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}"
+        web.headers.update(headers)
+        ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}")
+        if ret.status_code != 200:
+            ctx.error(
+                f"Failed to get the #{pr} pull-request details on repository {repository!r}: {ret.reason}"
+            )
+            ctx.exit(1)
+        pr_details = ret.json()
+        return _filter_test_labels(pr_details["labels"])
+
+
+def _get_pr_test_labels_from_event_payload(
+    gh_event: dict[str, Any]
+) -> list[tuple[str, str]]:
+    """
+    Get the pull-request test labels.
+    """
+    if "pull_request" not in gh_event:
+        return []
+    return _filter_test_labels(gh_event["pull_request"]["labels"])
+
+
+def _filter_test_labels(labels: list[dict[str, Any]]) -> list[tuple[str, str]]:
+    return [
+        (label["name"], label["description"])
+        for label in labels
+        if label["name"].startswith("test:")
+    ]
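A quick usage sketch for ``_filter_test_labels``, with illustrative label payloads shaped like the GitHub API objects it consumes (not part of the diff):

# Only labels whose name starts with "test:" survive the filter
labels = [
    {"name": "test:slow", "description": "Also run slow tests"},
    {"name": "bug", "description": "Something is broken"},
]
assert _filter_test_labels(labels) == [("test:slow", "Also run slow tests")]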