commit 2b4b3ee2f6
Merge pull request #65511 from s0undt3ch/hotfix/merge-forward

[master] Merge 3006.x into master

62 changed files with 2,679 additions and 1,999 deletions
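In short: the four generated test workflows (ci, nightly, scheduled, staging) and the template that produces them gain two new reusable-workflow inputs, workflow-slug and default-timeout (180 minutes for ci and staging, 360 for nightly and scheduled), which the test actions use for matrix generation and job timeouts; the nightly cron (and its template) moves from 01:00 to 00:00; the Debian packaging installs the logrotate config as /etc/logrotate.d/salt-common and removes the broken /etc/logrotate.d/salt directory shipped in 3006.3 (issue 65231), with a new package integration test; and a handful of pinned requirements are bumped (pygit2, pymysql, ntc-templates, pytest-skip-markers, urllib3, werkzeug).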
.github/workflows/ci.yml (58 changes, vendored)

Every test job's with: block gains the same two inputs. The first of the 29 hunks is shown in full; the rest are identical apart from the hunk offsets and the job definition that follows in the trailing context.

@@ -1868,6 +1868,8 @@ jobs:
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
       skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
       skip-junit-reports: ${{ github.event_name == 'pull_request' }}
+      workflow-slug: ci
+      default-timeout: 180

   windows-2019:
     name: Windows 2019 Test

The remaining hunks (@@ -1888,6 +1890,8 @@ through @@ -2428,6 +2484,8 @@) make the same addition ahead of windows-2022, macos-12, almalinux-8, almalinux-9, amazonlinux-2, archlinux-lts, centos-7, centosstream-8, centosstream-9, debian-10, debian-11, debian-11-arm64, debian-12, debian-12-arm64, fedora-37, fedora-38, opensuse-15, photonos-3, photonos-3-arm64, photonos-4, photonos-4-arm64, photonos-5, photonos-5-arm64, ubuntu-2004, ubuntu-2004-arm64, ubuntu-2204, ubuntu-2204-arm64, and combine-all-code-coverage.
.github/workflows/nightly.yml (60 changes, vendored)

@@ -18,7 +18,7 @@ on:
       description: Skip running the Salt packages test suite.
   schedule:
     # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
-    - cron: '0 1 * * *'  # Every day at 1AM
+    - cron: '0 0 * * *'  # Every day at midnight

 env:
   COLUMNS: 190

As in ci.yml, every test job then gains the same two inputs, here with the nightly slug and the longer six-hour timeout. The first of the 29 hunks:

@@ -1924,6 +1924,8 @@ jobs:
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
       skip-code-coverage: false
       skip-junit-reports: false
+      workflow-slug: nightly
+      default-timeout: 360

   windows-2019:
     name: Windows 2019 Test

The remaining 28 hunks (through @@ -2484,6 +2540,8 @@) repeat the addition for the same job list as in ci.yml, ending at combine-all-code-coverage.
.github/workflows/release.yml (1 change, vendored)

@@ -116,7 +116,6 @@ jobs:

   download-onedir-artifact:
     name: Download Staging Onedir Artifact
     if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
     runs-on:
       - self-hosted
       - linux
.github/workflows/scheduled.yml (58 changes, vendored)

Every test job gains the same two inputs, with the scheduled slug and the six-hour timeout. The first of the 29 hunks:

@@ -1902,6 +1902,8 @@ jobs:
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
       skip-code-coverage: false
       skip-junit-reports: false
+      workflow-slug: scheduled
+      default-timeout: 360

   windows-2019:
     name: Windows 2019 Test

The remaining 28 hunks (through @@ -2462,6 +2518,8 @@) repeat the addition for the same job list as in ci.yml, ending at combine-all-code-coverage.
.github/workflows/staging.yml (58 changes, vendored)

Every test job gains the same two inputs, with the staging slug and the three-hour timeout. The first of the 29 hunks:

@@ -1924,6 +1924,8 @@ jobs:
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
       skip-code-coverage: true
       skip-junit-reports: true
+      workflow-slug: staging
+      default-timeout: 180

   windows-2019:
     name: Windows 2019 Test

The remaining 28 hunks (through @@ -2484,6 +2540,8 @@) repeat the addition for the same job list as in ci.yml, except that the last trailing context here is build-src-repo (name: Build Repository) rather than combine-all-code-coverage.
[file names lost in this rendering — the <%- ... %> delimiters mark these two hunks as workflow templates]

@@ -28,7 +28,7 @@ on:
       description: Skip running the Salt packages test suite.
   schedule:
     # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
-    - cron: '0 1 * * *'  # Every day at 1AM
+    - cron: '0 0 * * *'  # Every day at midnight
 <%- endblock on %>

@@ -150,7 +150,6 @@ permissions:

   download-onedir-artifact:
     name: Download Staging Onedir Artifact
     if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
     runs-on:
       - self-hosted
       - linux
.github/workflows/templates/test-salt.yml.jinja (11 changes, vendored)

@@ -1,3 +1,8 @@
+<%- if workflow_slug in ("nightly", "scheduled") %>
+  <%- set timeout_value = 360 %>
+<%- else %>
+  <%- set timeout_value = 180 %>
+<%- endif %>

 <%- for slug, display_name, arch in test_salt_listing["windows"] %>

@@ -21,6 +26,8 @@
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
       skip-code-coverage: <{ skip_test_coverage_check }>
       skip-junit-reports: <{ skip_junit_reports_check }>
+      workflow-slug: <{ workflow_slug }>
+      default-timeout: <{ timeout_value }>

 <%- endfor %>

The same two lines are added in the other two platform loops as well (@@ -47,6 +54,8 @@ and @@ -73,5 +82,7 @@).
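For reference, here is a minimal sketch of how one rendered job call ends up looking in the generated workflows above (job names as in the ci.yml output; the uses: target is an assumption, since that line sits outside the hunks shown):

```yaml
  debian-12:
    name: Debian 12 Test
    uses: ./.github/workflows/test-action.yml   # assumed target; not shown in the diff
    with:
      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13
      skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
      skip-junit-reports: ${{ github.event_name == 'pull_request' }}
      workflow-slug: ci        # rendered from <{ workflow_slug }>
      default-timeout: 180     # 360 for nightly/scheduled, per the template conditional
```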
.github/workflows/test-action-macos.yml (18 changes, vendored)

@@ -56,6 +56,16 @@ on:
       type: boolean
       description: Skip Publishing JUnit Reports
       default: false
+    workflow-slug:
+      required: false
+      type: string
+      description: Which workflow is running.
+      default: ci
+    default-timeout:
+      required: false
+      type: number
+      description: Timeout, in minutes, for the test job (default 360, 6 hours).
+      default: 360

 env:
   COLUMNS: 190

@@ -85,14 +95,14 @@ jobs:
       - name: Generate Test Matrix
         id: generate-matrix
         run: |
-          tools ci matrix ${{ inputs.distro-slug }}
+          tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ inputs.distro-slug }}

   test:
     name: Test
     runs-on: ${{ inputs.distro-slug }}
-    # Full test runs. Each chunk should never take more than 3 hours.
-    # Partial test runs (no chunk parallelization): 5 hours.
-    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 180 || 300 }}
+    # Full test runs. Each chunk should never take more than 2 hours.
+    # Partial test runs (no chunk parallelization): 6 hours.
+    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
     needs:
       - generate-matrix
     strategy:
.github/workflows/test-action.yml (18 changes, vendored)

@@ -56,6 +56,16 @@ on:
       type: boolean
       description: Skip Publishing JUnit Reports
       default: false
+    workflow-slug:
+      required: false
+      type: string
+      description: Which workflow is running.
+      default: ci
+    default-timeout:
+      required: false
+      type: number
+      description: Timeout, in minutes, for the test job (default 360, 6 hours).
+      default: 360

 env:
   COLUMNS: 190

@@ -90,7 +100,7 @@ jobs:
       - name: Generate Test Matrix
         id: generate-matrix
         run: |
-          tools ci matrix ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }}
+          tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }}

   test:
     name: Test

@@ -98,9 +108,9 @@
       - self-hosted
       - linux
       - bastion
-    # Full test runs. Each chunk should never take more than 3 hours.
-    # Partial test runs (no chunk parallelization): 5 hours.
-    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && 180 || 300 }}
+    # Full test runs. Each chunk should never take more than 2 hours.
+    # Partial test runs (no chunk parallelization): 6 hours.
+    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
     needs:
       - generate-matrix
     strategy:
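A note on the timeout expression: GitHub Actions expressions have no ternary operator, so `cond && a || b` is the usual idiom — when the testrun type is full, timeout-minutes resolves to the caller-supplied default-timeout (180 for ci/staging, 360 for nightly/scheduled); otherwise it falls back to 360. The usual caveat applies: the idiom falls through to the right-hand side if the middle value is falsy, which is harmless here since default-timeout is always a positive number. A minimal sketch, assuming a testrun input shaped like the one used in these workflows:

```yaml
# Hypothetical reduced example of the idiom used above.
# inputs.testrun is a JSON string such as '{"type": "full"}'.
jobs:
  test:
    runs-on: ubuntu-latest
    # 'full' run -> inputs.default-timeout; anything else -> 360 minutes.
    timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
    steps:
      - run: echo "timeout resolved when the workflow is evaluated"
```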
[file name lost in this rendering — a pylint configuration file]

@@ -697,7 +697,8 @@ allowed-3rd-party-modules=msgpack,
     fnmatch,
     ptscripts,
     packaging,
-    looseversion
+    looseversion,
+    pytestskipmarkers

 [EXCEPTIONS]
changelog/65114.fixed.md (new file, 1 line)

@@ -0,0 +1 @@
+Fix nonce verification, request server replies do not stomp on each other.
changelog/65231.fixed.md (new file, 2 lines)

@@ -0,0 +1,2 @@
+Install logrotate config as /etc/logrotate.d/salt-common for Debian packages
+Remove broken /etc/logrotate.d/salt directory from 3006.3 if it exists.
[file name lost in this rendering — the Debian control file]

@@ -25,7 +25,7 @@ Description: Salt debug symbols
 Package: salt-common
 Architecture: amd64 arm64
 Depends: ${misc:Depends}
-Breaks: salt-minion (<= 3006.1)
+Breaks: salt-minion (<= 3006.4)
 Suggests: ifupdown
 Recommends: lsb-release
 Description: shared libraries that salt requires for all packages

@@ -51,8 +51,8 @@ Description: shared libraries that salt requires for all packages

 Package: salt-master
 Architecture: amd64 arm64
-Replaces: salt-common (<= 3006.1)
-Breaks: salt-common (<= 3006.1)
+Replaces: salt-common (<= 3006.4)
+Breaks: salt-common (<= 3006.4)
 Depends: salt-common (= ${source:Version}),
          ${misc:Depends}
 Description: remote manager to administer servers via salt

@@ -77,8 +77,8 @@ Description: remote manager to administer servers via salt

 Package: salt-minion
 Architecture: amd64 arm64
-Replaces: salt-common (<= 3006.1)
-Breaks: salt-common (<= 3006.1)
+Replaces: salt-common (<= 3006.4)
+Breaks: salt-common (<= 3006.4)
 Depends: bsdmainutils,
          dctrl-tools,
          salt-common (= ${source:Version}),

@@ -131,7 +131,7 @@ Description: master-of-masters for salt, the distributed remote execution system

 Package: salt-ssh
 Architecture: amd64 arm64
-Breaks: salt-common (<= 3006.3)
+Breaks: salt-common (<= 3006.4)
 Depends: salt-common (= ${source:Version}),
          openssh-client,
          ${misc:Depends}

@@ -160,7 +160,7 @@ Description: remote manager to administer servers via Salt SSH

 Package: salt-cloud
 Architecture: amd64 arm64
-Breaks: salt-common (<= 3006.3)
+Breaks: salt-common (<= 3006.4)
 Depends: salt-common (= ${source:Version}),
          ${misc:Depends}
 Description: public cloud VM management system
pkg/debian/salt-common.conffiles (new file, 1 line)

@@ -0,0 +1 @@
+/etc/logrotate.d/salt-common
[file name lost in this rendering — a Debian .dirs file]

@@ -4,3 +4,4 @@
 /usr/share/fish/vendor_completions.d
 /opt/saltstack/salt
 /etc/salt
+/etc/logrotate.d
[file name lost in this rendering — a Debian .install file]

@@ -1,4 +1,4 @@
-pkg/common/salt-common.logrotate /etc/logrotate.d/salt
+pkg/common/logrotate/salt-common /etc/logrotate.d
 pkg/common/fish-completions/salt-cp.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-call.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d
[file name lost in this rendering — a Debian maintainer script]

@@ -31,5 +31,9 @@ case "$1" in
             -s $SALT_SHELL \
             -g $SALT_GROUP \
             $SALT_USER
+
+        # Remove incorrectly installed logrotate config - issue 65231
+        test -d /etc/logrotate.d/salt && rm -r /etc/logrotate.d/salt || /bin/true
+
         ;;
 esac
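The `test -d … && rm -r … || /bin/true` pattern here mirrors the existing ufw cleanup below: Debian maintainer scripts conventionally run with `set -e`, so the trailing `|| /bin/true` turns a failed `test` (directory absent) into a success instead of aborting the whole script.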
[file name lost in this rendering — a Debian maintainer script]

@@ -10,5 +10,9 @@ case "$1" in
         find ${SALT_HOME} /etc/salt /var/log/salt /var/cache/salt /var/run/salt \
           \! \( -path /etc/salt/cloud.deploy.d\* -o -path /var/log/salt/cloud -o -path /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy\* \) -a \
           \( -user ${SALT_USER} -o -group ${SALT_GROUP} \) -exec chown root:root \{\} \;
+
+        # remove incorrectly installed ufw salt-master directory - issue 57712
+        test -d /etc/ufw/applications.d/salt-master && rm -rf /etc/ufw/applications.d/salt-master || /bin/true
+
         ;;
 esac
[file name lost in this rendering — the RPM spec file]

@@ -266,7 +266,7 @@ install -p -m 0644 %{_salt_src}/pkg/common/salt-proxy@.service %{buildroot}%{_un
 # Logrotate
 #install -p %{SOURCE10} .
 mkdir -p %{buildroot}%{_sysconfdir}/logrotate.d/
-install -p -m 0644 %{_salt_src}/pkg/common/salt-common.logrotate %{buildroot}%{_sysconfdir}/logrotate.d/salt
+install -p -m 0644 %{_salt_src}/pkg/common/logrotate/salt-common %{buildroot}%{_sysconfdir}/logrotate.d/salt

 # Bash completion
 mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/
pkg/tests/integration/test_logrotate_config.py (new file, 46 lines)

@@ -0,0 +1,46 @@
"""
Tests for the logrotate config
"""

import pathlib

import packaging.version
import pytest

pytestmark = [
    pytest.mark.skip_unless_on_linux,
]


@pytest.fixture
def logrotate_config_file(grains):
    """
    Fixture for the logrotate config file path
    """
    if grains["os_family"] == "RedHat":
        return pathlib.Path("/etc/logrotate.d", "salt")
    elif grains["os_family"] == "Debian":
        return pathlib.Path("/etc/logrotate.d", "salt-common")


def test_logrotate_config(logrotate_config_file):
    """
    Test that the logrotate config has been installed correctly
    """
    assert logrotate_config_file.is_file()
    assert logrotate_config_file.owner() == "root"
    assert logrotate_config_file.group() == "root"


def test_issue_65231_etc_logrotate_salt_dir_removed(install_salt):
    """
    Test that /etc/logrotate.d/salt is not a directory
    """
    if install_salt.prev_version and packaging.version.parse(
        install_salt.prev_version
    ) <= packaging.version.parse("3006.4"):
        pytest.skip("Testing a downgrade to 3006.4, do not run")

    path = pathlib.Path("/etc/logrotate.d/salt")
    if path.exists():
        assert path.is_dir() is False
[file name lost in this rendering — a requirements .in file]

@@ -1,11 +1,8 @@
 # This is a compilation of requirements installed on salt-jenkins git.salt state run
-# XXX: Temporarily do not install pylxd.
-# pylxd(or likely ws4py) will cause the test suite to hang at the finish line under runtests.py
-# pylxd>=2.2.5

 --constraint=../pkg/py{py_version}/{platform}.txt

-pygit2>=1.2.0
+pygit2>=1.10.1
 yamllint
 mercurial
 hglib
[file name lost in this rendering — a requirements .in file]

@@ -2,7 +2,7 @@
 --constraint=../pkg/py{py_version}/{platform}.txt

 pyiface
-pygit2>=1.4.0
+pygit2>=1.10.1
 pymysql>=1.0.2
 ansible>=4.4.0,<5.0.1; python_version < '3.9'
 ansible>=7.0.0; python_version >= '3.9'
@ -275,7 +275,7 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
ntc-templates==3.4.0
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
# via certvalidator
|
||||
|
@ -335,7 +335,7 @@ pydantic==1.10.8
|
|||
# inflect
|
||||
pyeapi==1.0.0
|
||||
# via napalm
|
||||
pygit2==1.12.1
|
||||
pygit2==1.13.1
|
||||
# via -r requirements/static/ci/darwin.in
|
||||
pynacl==1.5.0
|
||||
# via
|
||||
|
@ -367,7 +367,7 @@ pytest-salt-factories==1.0.0rc27
|
|||
# via -r requirements/pytest.txt
|
||||
pytest-shell-utilities==1.8.0
|
||||
# via pytest-salt-factories
|
||||
pytest-skip-markers==1.4.1
|
||||
pytest-skip-markers==1.5.0
|
||||
# via
|
||||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
|
@ -554,7 +554,7 @@ websocket-client==0.40.0
|
|||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via -r requirements/static/ci/common.in
|
||||
werkzeug==2.3.6
|
||||
werkzeug==3.0.1
|
||||
# via
|
||||
# moto
|
||||
# pytest-httpserver
|
||||
|
|
|
@ -270,7 +270,7 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
ntc-templates==3.4.0
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
# via certvalidator
|
||||
|
@ -359,7 +359,7 @@ pytest-salt-factories==1.0.0rc27
|
|||
# via -r requirements/pytest.txt
|
||||
pytest-shell-utilities==1.8.0
|
||||
# via pytest-salt-factories
|
||||
pytest-skip-markers==1.4.1
|
||||
pytest-skip-markers==1.5.0
|
||||
# via
|
||||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
|
@ -540,7 +540,7 @@ websocket-client==0.40.0
|
|||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via -r requirements/static/ci/common.in
|
||||
werkzeug==2.3.6
|
||||
werkzeug==3.0.1
|
||||
# via
|
||||
# moto
|
||||
# pytest-httpserver
|
||||
|
|
|
@ -295,7 +295,7 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
ntc-templates==3.4.0
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
# via certvalidator
|
||||
|
@ -353,7 +353,7 @@ pydantic==1.10.8
|
|||
# inflect
|
||||
pyeapi==1.0.0
|
||||
# via napalm
|
||||
pygit2==1.12.1
|
||||
pygit2==1.13.1
|
||||
# via -r requirements/static/ci/linux.in
|
||||
pyiface==0.0.11
|
||||
# via -r requirements/static/ci/linux.in
|
||||
|
@ -361,7 +361,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat
|
|||
# via -r requirements/static/ci/common.in
|
||||
pyjwt==2.7.0
|
||||
# via twilio
|
||||
pymysql==1.0.3
|
||||
pymysql==1.1.0
|
||||
# via -r requirements/static/ci/linux.in
|
||||
pynacl==1.5.0
|
||||
# via
|
||||
|
@ -393,7 +393,7 @@ pytest-salt-factories==1.0.0rc27
|
|||
# via -r requirements/pytest.txt
|
||||
pytest-shell-utilities==1.8.0
|
||||
# via pytest-salt-factories
|
||||
pytest-skip-markers==1.4.1
|
||||
pytest-skip-markers==1.5.0
|
||||
# via
|
||||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
|
@ -604,7 +604,7 @@ websocket-client==0.40.0
|
|||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via -r requirements/static/ci/common.in
|
||||
werkzeug==2.3.6
|
||||
werkzeug==3.0.1
|
||||
# via
|
||||
# moto
|
||||
# pytest-httpserver
|
||||
|
|
|
@ -286,13 +286,13 @@ pydantic==1.10.8
|
|||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/windows.txt
|
||||
# inflect
|
||||
pygit2==1.12.1
|
||||
pygit2==1.13.1
|
||||
# via -r requirements/static/ci/windows.in
|
||||
pymssql==2.2.7
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/windows.txt
|
||||
# -r requirements/windows.txt
|
||||
pymysql==1.0.3
|
||||
pymysql==1.1.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/windows.txt
|
||||
# -r requirements/windows.txt
|
||||
|
@ -320,7 +320,7 @@ pytest-salt-factories==1.0.0rc27
|
|||
# via -r requirements/pytest.txt
|
||||
pytest-shell-utilities==1.8.0
|
||||
# via pytest-salt-factories
|
||||
pytest-skip-markers==1.4.1
|
||||
pytest-skip-markers==1.5.0
|
||||
# via
|
||||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
|
@ -489,7 +489,7 @@ websocket-client==0.40.0
|
|||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via -r requirements/static/ci/common.in
|
||||
werkzeug==2.3.6
|
||||
werkzeug==3.0.1
|
||||
# via
|
||||
# moto
|
||||
# pytest-httpserver
|
||||
|
|
|
@ -273,7 +273,7 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
ntc-templates==3.4.0
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
# via certvalidator
|
||||
|
@ -331,7 +331,7 @@ pydantic==1.10.8
|
|||
# inflect
|
||||
pyeapi==1.0.0
|
||||
# via napalm
|
||||
pygit2==1.12.1
|
||||
pygit2==1.13.1
|
||||
# via -r requirements/static/ci/darwin.in
|
||||
pynacl==1.5.0
|
||||
# via
|
||||
|
@ -520,7 +520,7 @@ typing-extensions==4.8.0
|
|||
# pydantic
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
urllib3==1.26.14
|
||||
urllib3==1.26.18
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
|
||||
# botocore
|
||||
|
@ -548,7 +548,7 @@ websocket-client==0.40.0
|
|||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via -r requirements/static/ci/common.in
|
||||
werkzeug==2.3.6
|
||||
werkzeug==3.0.1
|
||||
# via
|
||||
# moto
|
||||
# pytest-httpserver
|
||||
|
|
|
@ -268,7 +268,7 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
ntc-templates==3.4.0
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
# via certvalidator
|
||||
|
@ -536,7 +536,7 @@ websocket-client==0.40.0
|
|||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via -r requirements/static/ci/common.in
|
||||
werkzeug==2.3.6
|
||||
werkzeug==3.0.1
|
||||
# via
|
||||
# moto
|
||||
# pytest-httpserver
|
||||
|
|
|
@@ -291,7 +291,7 @@ netmiko==4.2.0
     # via napalm
 netutils==1.6.0
     # via napalm
-ntc-templates==3.4.0
+ntc-templates==4.0.1
     # via netmiko
 oscrypto==1.3.0
     # via certvalidator

@@ -349,7 +349,7 @@ pydantic==1.10.8
     #   inflect
 pyeapi==1.0.0
     # via napalm
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/linux.in
 pyiface==0.0.11
     # via -r requirements/static/ci/linux.in

@@ -357,7 +357,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat
     # via -r requirements/static/ci/common.in
 pyjwt==2.7.0
     # via twilio
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/static/ci/linux.in
 pynacl==1.5.0
     # via

@@ -598,7 +598,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -284,13 +284,13 @@ pydantic==1.10.8
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   inflect
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
 pymssql==2.2.7
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.11/windows.txt
     #   -r requirements/windows.txt

@@ -318,7 +318,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -485,7 +485,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -274,7 +274,7 @@ netmiko==4.2.0
     # via napalm
 netutils==1.6.0
     # via napalm
-ntc-templates==3.4.0
+ntc-templates==4.0.1
     # via netmiko
 oscrypto==1.3.0
     # via certvalidator

@@ -363,7 +363,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -544,7 +544,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -299,7 +299,7 @@ netmiko==4.2.0
     # via napalm
 netutils==1.6.0
     # via napalm
-ntc-templates==3.4.0
+ntc-templates==4.0.1
     # via netmiko
 oscrypto==1.3.0
     # via certvalidator

@@ -357,7 +357,7 @@ pydantic==1.10.8
     #   inflect
 pyeapi==1.0.0
     # via napalm
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/linux.in
 pyiface==0.0.11
     # via -r requirements/static/ci/linux.in

@@ -365,7 +365,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat
     # via -r requirements/static/ci/common.in
 pyjwt==2.7.0
     # via twilio
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/static/ci/linux.in
 pynacl==1.5.0
     # via

@@ -397,7 +397,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -608,7 +608,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -290,13 +290,13 @@ pydantic==1.10.8
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   inflect
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
 pymssql==2.2.7
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.8/windows.txt
     #   -r requirements/windows.txt

@@ -324,7 +324,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -494,7 +494,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -275,7 +275,7 @@ netmiko==4.2.0
     # via napalm
 netutils==1.6.0
     # via napalm
-ntc-templates==3.4.0
+ntc-templates==4.0.1
     # via netmiko
 oscrypto==1.3.0
     # via certvalidator

@@ -335,7 +335,7 @@ pydantic==1.10.8
     #   inflect
 pyeapi==1.0.0
     # via napalm
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/darwin.in
 pynacl==1.5.0
     # via

@@ -367,7 +367,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -554,7 +554,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -270,7 +270,7 @@ netmiko==4.2.0
     # via napalm
 netutils==1.6.0
     # via napalm
-ntc-templates==3.4.0
+ntc-templates==4.0.1
     # via netmiko
 oscrypto==1.3.0
     # via certvalidator

@@ -359,7 +359,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -540,7 +540,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -297,7 +297,7 @@ netmiko==4.2.0
     # via napalm
 netutils==1.6.0
     # via napalm
-ntc-templates==3.4.0
+ntc-templates==4.0.1
     # via netmiko
 oscrypto==1.3.0
     # via certvalidator

@@ -355,7 +355,7 @@ pydantic==1.10.8
     #   inflect
 pyeapi==1.0.0
     # via napalm
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/linux.in
 pyiface==0.0.11
     # via -r requirements/static/ci/linux.in

@@ -363,7 +363,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat
     # via -r requirements/static/ci/common.in
 pyjwt==2.7.0
     # via twilio
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/static/ci/linux.in
 pynacl==1.5.0
     # via

@@ -395,7 +395,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -606,7 +606,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -286,13 +286,13 @@ pydantic==1.10.8
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   inflect
-pygit2==1.12.1
+pygit2==1.13.1
     # via -r requirements/static/ci/windows.in
 pymssql==2.2.7
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via
     #   -c requirements/static/ci/../pkg/py3.9/windows.txt
     #   -r requirements/windows.txt

@@ -320,7 +320,7 @@ pytest-salt-factories==1.0.0rc27
     # via -r requirements/pytest.txt
 pytest-shell-utilities==1.8.0
     # via pytest-salt-factories
-pytest-skip-markers==1.4.1
+pytest-skip-markers==1.5.0
     # via
     #   pytest-salt-factories
     #   pytest-shell-utilities

@@ -490,7 +490,7 @@ websocket-client==0.40.0
     #   kubernetes
 wempy==0.2.1
     # via -r requirements/static/ci/common.in
-werkzeug==2.3.6
+werkzeug==3.0.1
     # via
     #   moto
     #   pytest-httpserver

@@ -3,7 +3,7 @@
 dmidecode
 patch
-pygit2>=1.2.0
+pygit2>=1.10.1
 sed
 pywinrm>=0.4.1
 yamllint

@@ -91,7 +91,7 @@ pydantic==1.10.8
     # via inflect
 pymssql==2.2.7
     # via -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/windows.txt
 pyopenssl==23.2.0
     # via -r requirements/windows.txt

@@ -115,7 +115,7 @@ tornado==6.3.3
     # via -r requirements/base.txt
 typing-extensions==4.8.0
     # via pydantic
-urllib3==1.26.14
+urllib3==1.26.18
     # via requests
 vultr==1.0.1
     # via -r requirements/darwin.txt

@@ -91,7 +91,7 @@ pydantic==1.10.8
     # via inflect
 pymssql==2.2.7
     # via -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/windows.txt
 pyopenssl==23.2.0
     # via -r requirements/windows.txt

@@ -93,7 +93,7 @@ pydantic==1.10.8
     # via inflect
 pymssql==2.2.7
     # via -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/windows.txt
 pyopenssl==23.2.0
     # via -r requirements/windows.txt

@@ -91,7 +91,7 @@ pydantic==1.10.8
     # via inflect
 pymssql==2.2.7
     # via -r requirements/windows.txt
-pymysql==1.0.3
+pymysql==1.1.0
     # via -r requirements/windows.txt
 pyopenssl==23.2.0
     # via -r requirements/windows.txt

@@ -16,6 +16,7 @@ import socket
 import threading
 import time
 import urllib
+import uuid
 import warnings

 import tornado

@@ -36,6 +37,7 @@ import salt.utils.platform
 import salt.utils.versions
 from salt.exceptions import SaltClientError, SaltReqTimeoutError
 from salt.utils.network import ip_bracket
+from salt.utils.process import SignalHandlingProcess

 if salt.utils.platform.is_windows():
     USE_LOAD_BALANCER = True

@@ -134,66 +136,63 @@ def _set_tcp_keepalive(sock, opts):
         sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0)


-if USE_LOAD_BALANCER:
-
-    class LoadBalancerServer(SignalHandlingProcess):
-        """
-        Raw TCP server which runs in its own process and will listen
-        for incoming connections. Each incoming connection will be
-        sent via multiprocessing queue to the workers.
-        Since the queue is shared amongst workers, only one worker will
-        handle a given connection.
-        """
+class LoadBalancerServer(SignalHandlingProcess):
+    """
+    Raw TCP server which runs in its own process and will listen
+    for incoming connections. Each incoming connection will be
+    sent via multiprocessing queue to the workers.
+    Since the queue is shared amongst workers, only one worker will
+    handle a given connection.
+    """

-        # TODO: opts!
-        # Based on default used in tornado.netutil.bind_sockets()
-        backlog = 128
+    # TODO: opts!
+    # Based on default used in tornado.netutil.bind_sockets()
+    backlog = 128

-        def __init__(self, opts, socket_queue, **kwargs):
-            super().__init__(**kwargs)
-            self.opts = opts
-            self.socket_queue = socket_queue
-            self._socket = None
+    def __init__(self, opts, socket_queue, **kwargs):
+        super().__init__(**kwargs)
+        self.opts = opts
+        self.socket_queue = socket_queue
+        self._socket = None

-        def close(self):
-            if self._socket is not None:
-                self._socket.shutdown(socket.SHUT_RDWR)
-                self._socket.close()
-                self._socket = None
+    def close(self):
+        if self._socket is not None:
+            self._socket.shutdown(socket.SHUT_RDWR)
+            self._socket.close()
+            self._socket = None

-        # pylint: disable=W1701
-        def __del__(self):
-            self.close()
+    # pylint: disable=W1701
+    def __del__(self):
+        self.close()

-        # pylint: enable=W1701
+    # pylint: enable=W1701

-        def run(self):
-            """
-            Start the load balancer
-            """
-            self._socket = _get_socket(self.opts)
-            self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-            _set_tcp_keepalive(self._socket, self.opts)
-            self._socket.setblocking(1)
-            self._socket.bind(_get_bind_addr(self.opts, "ret_port"))
-            self._socket.listen(self.backlog)
+    def run(self):
+        """
+        Start the load balancer
+        """
+        self._socket = _get_socket(self.opts)
+        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        _set_tcp_keepalive(self._socket, self.opts)
+        self._socket.setblocking(1)
+        self._socket.bind(_get_bind_addr(self.opts, "ret_port"))
+        self._socket.listen(self.backlog)

-            while True:
-                try:
-                    # Wait for a connection to occur since the socket is
-                    # blocking.
-                    connection, address = self._socket.accept()
-                    # Wait for a free slot to be available to put
-                    # the connection into.
-                    # Sockets are picklable on Windows in Python 3.
-                    self.socket_queue.put((connection, address), True, None)
-                except OSError as e:
-                    # ECONNABORTED indicates that there was a connection
-                    # but it was closed while still in the accept queue.
-                    # (observed on FreeBSD).
-                    if tornado.util.errno_from_exception(e) == errno.ECONNABORTED:
-                        continue
-                    raise
+        while True:
+            try:
+                # Wait for a connection to occur since the socket is
+                # blocking.
+                connection, address = self._socket.accept()
+                # Wait for a free slot to be available to put
+                # the connection into.
+                # Sockets are picklable on Windows in Python 3.
+                self.socket_queue.put((connection, address), True, None)
+            except OSError as e:
+                # ECONNABORTED indicates that there was a connection
+                # but it was closed while still in the accept queue.
+                # (observed on FreeBSD).
+                if tornado.util.errno_from_exception(e) == errno.ECONNABORTED:
+                    continue
+                raise


 class Resolver(tornado.netutil.DefaultLoopResolver):

@@ -332,8 +331,6 @@ class TCPPubClient(salt.transport.base.PublishClient):
         self._closed = False
         self._stream = await self.getstream(timeout=timeout)
         if self._stream:
-            # if not self._stream_return_running:
-            #     self.io_loop.spawn_callback(self._stream_return)
             if self.connect_callback:
                 self.connect_callback(True)
         self.connected = True

@@ -650,45 +647,43 @@ class SaltMessageServer(tornado.tcpserver.TCPServer):
             raise


-if USE_LOAD_BALANCER:
-
-    class LoadBalancerWorker(SaltMessageServer):
-        """
-        This will receive TCP connections from 'LoadBalancerServer' via
-        a multiprocessing queue.
-        Since the queue is shared amongst workers, only one worker will handle
-        a given connection.
-        """
+class LoadBalancerWorker(SaltMessageServer):
+    """
+    This will receive TCP connections from 'LoadBalancerServer' via
+    a multiprocessing queue.
+    Since the queue is shared amongst workers, only one worker will handle
+    a given connection.
+    """

-        def __init__(self, socket_queue, message_handler, *args, **kwargs):
-            super().__init__(message_handler, *args, **kwargs)
-            self.socket_queue = socket_queue
-            self._stop = threading.Event()
-            self.thread = threading.Thread(target=self.socket_queue_thread)
-            self.thread.start()
+    def __init__(self, socket_queue, message_handler, *args, **kwargs):
+        super().__init__(message_handler, *args, **kwargs)
+        self.socket_queue = socket_queue
+        self._stop = threading.Event()
+        self.thread = threading.Thread(target=self.socket_queue_thread)
+        self.thread.start()

-        def close(self):
-            self._stop.set()
-            self.thread.join()
-            super().close()
+    def close(self):
+        self._stop.set()
+        self.thread.join()
+        super().close()

-        def socket_queue_thread(self):
-            try:
-                while True:
-                    try:
-                        client_socket, address = self.socket_queue.get(True, 1)
-                    except queue.Empty:
-                        if self._stop.is_set():
-                            break
-                        continue
-                    # 'self.io_loop' initialized in super class
-                    # 'tornado.tcpserver.TCPServer'.
-                    # 'self._handle_connection' defined in same super class.
-                    self.io_loop.spawn_callback(
-                        self._handle_connection, client_socket, address
-                    )
-            except (KeyboardInterrupt, SystemExit):
-                pass
+    def socket_queue_thread(self):
+        try:
+            while True:
+                try:
+                    client_socket, address = self.socket_queue.get(True, 1)
+                except queue.Empty:
+                    if self._stop.is_set():
+                        break
+                    continue
+                # 'self.io_loop' initialized in super class
+                # 'salt.ext.tornado.tcpserver.TCPServer'.
+                # 'self._handle_connection' defined in same super class.
+                self.io_loop.spawn_callback(
+                    self._handle_connection, client_socket, address
+                )
+        except (KeyboardInterrupt, SystemExit):
+            pass


 class TCPClientKeepAlive(tornado.tcpclient.TCPClient):

@@ -749,10 +744,7 @@ class MessageClient:
         self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
         with salt.utils.asynchronous.current_ioloop(self.io_loop):
             self._tcp_client = TCPClientKeepAlive(opts, resolver=resolver)
-        self._mid = 1
-        self._max_messages = int((1 << 31) - 2)  # number of IDs before we wrap
-        # TODO: max queue size
         self.send_queue = []  # queue of messages to be sent
         self.send_future_map = {}  # mapping of request_id -> Future

         self._read_until_future = None

@@ -765,13 +757,9 @@ class MessageClient:

         self.backoff = opts.get("tcp_reconnect_backoff", 1)

-    def _stop_io_loop(self):
-        if self.io_loop is not None:
-            self.io_loop.stop()
-
     # TODO: timeout inflight sessions
     def close(self):
-        if self._closing:
+        if self._closing or self._closed:
             return
         self._closing = True
         self.io_loop.add_timeout(1, self.check_close)

@@ -902,18 +890,7 @@ class MessageClient:
         self._stream_return_running = False

     def _message_id(self):
-        wrap = False
-        while self._mid in self.send_future_map:
-            if self._mid >= self._max_messages:
-                if wrap:
-                    # this shouldn't ever happen, but just in case
-                    raise Exception("Unable to find available messageid")
-                self._mid = 1
-                wrap = True
-            else:
-                self._mid += 1
-
-        return self._mid
+        return str(uuid.uuid4())

     # TODO: return a message object which takes care of multiplexing?
     def on_recv(self, callback):

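Note on the hunk above: the wrapping integer counter is replaced by a random UUID per request, which removes the collision bookkeeping entirely. A minimal sketch of the two id schemes side by side (standard library only; these helper names are illustrative, not Salt's):

    import uuid

    class CounterIds:
        """Old scheme: a wrapping integer that must dodge ids still in flight."""

        def __init__(self, max_messages=(1 << 31) - 2):
            self._mid = 1
            self._max_messages = max_messages
            self.send_future_map = {}  # ids with unanswered requests

        def next_id(self):
            wrap = False
            while self._mid in self.send_future_map:
                if self._mid >= self._max_messages:
                    if wrap:
                        raise RuntimeError("Unable to find available messageid")
                    self._mid = 1
                    wrap = True
                else:
                    self._mid += 1
            return self._mid

    def next_uuid_id():
        """New scheme: a UUID4 string is effectively unique without any state."""
        return str(uuid.uuid4())
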
@@ -1048,7 +1025,7 @@ class PubServer(tornado.tcpserver.TCPServer):
             return
         self._closing = True
         for client in self.clients:
-            client.stream.disconnect()
+            client.stream.close()

     # pylint: disable=W1701
     def __del__(self):

@@ -19,6 +19,7 @@ import tornado.ioloop
 import tornado.locks
 import zmq.asyncio
 import zmq.error
+import zmq.eventloop.future
 import zmq.eventloop.zmqstream

 import salt.payload

@@ -633,16 +634,15 @@ class AsyncReqMessageClient:
         else:
             self.io_loop = io_loop

-        self.context = zmq.Context()
+        self.context = zmq.eventloop.future.Context()

         self.send_queue = []

         self._closing = False
         self._future = None
         self.lock = tornado.locks.Lock()

     def connect(self):
-        if hasattr(self, "stream"):
+        if hasattr(self, "socket") and self.socket:
             return
         # wire up sockets
         self._init_socket()

@@ -658,24 +658,10 @@ class AsyncReqMessageClient:
             return
         else:
             self._closing = True
-            if hasattr(self, "stream") and self.stream is not None:
-                if ZMQ_VERSION_INFO < (14, 3, 0):
-                    # stream.close() doesn't work properly on pyzmq < 14.3.0
-                    if self.stream.socket:
-                        self.stream.socket.close()
-                    self.stream.io_loop.remove_handler(self.stream.socket)
-                    # set this to None, more hacks for messed up pyzmq
-                    self.stream.socket = None
-                    self.socket.close()
-                else:
-                    self.stream.close(1)
-                    self.socket = None
-                self.stream = None
+            if self._future:
+                self._future.set_exception(SaltException("Closing connection"))
+                self._future = None
             if hasattr(self, "socket") and self.socket is not None:
                 self.socket.close(0)
                 self.socket = None
             if self.context.closed is False:
                 # This hangs if closing the stream causes an import error
                 self.context.term()

     def _init_socket(self):

@@ -692,23 +678,8 @@ class AsyncReqMessageClient:
             self.socket.setsockopt(zmq.IPV6, 1)
         elif hasattr(zmq, "IPV4ONLY"):
             self.socket.setsockopt(zmq.IPV4ONLY, 0)
-        self.socket.linger = self.linger
+        self.socket.setsockopt(zmq.LINGER, self.linger)
         self.socket.connect(self.addr)
-        self.stream = zmq.eventloop.zmqstream.ZMQStream(
-            self.socket, io_loop=self.io_loop
-        )
-        self.stream.on_recv(self.handle_reply)
-
-    def timeout_message(self, future):
-        """
-        Handle a message timeout by removing it from the sending queue
-        and informing the caller
-
-        :raises: SaltReqTimeoutError
-        """
-        if self._future == future:
-            self._future = None
-            future.set_exception(SaltReqTimeoutError("Message timed out"))

     @tornado.gen.coroutine
     def send(self, message, timeout=None, callback=None):

@@ -732,20 +703,27 @@ class AsyncReqMessageClient:

         if timeout is not None:
             send_timeout = self.io_loop.call_later(
-                timeout, self.timeout_message, future
+                timeout, self._timeout_message, future
             )

-        with (yield self.lock.acquire()):
-            self._future = future
-            yield self.stream.send(message)
-            recv = yield future
+        self.io_loop.spawn_callback(self._send_recv, message, future)
+
+        recv = yield future

         raise tornado.gen.Return(recv)

-    def handle_reply(self, msg):
-        data = salt.payload.loads(msg[0])
-        future = self._future
-        self._future = None
-        future.set_result(data)
+    def _timeout_message(self, future):
+        if not future.done():
+            future.set_exception(SaltReqTimeoutError("Message timed out"))
+
+    @tornado.gen.coroutine
+    def _send_recv(self, message, future):
+        with (yield self.lock.acquire()):
+            yield self.socket.send(message)
+            recv = yield self.socket.recv()
+        if not future.done():
+            data = salt.payload.loads(recv)
+            future.set_result(data)


 class ZeroMQSocketMonitor:

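The rewritten send() above hands the socket I/O to _send_recv, which serializes request/reply pairs under a tornado lock on a future-returning zmq socket instead of routing replies through a ZMQStream callback. A standalone sketch of that pattern (it assumes a REP service listening on tcp://127.0.0.1:5555, which is not part of this diff):

    import tornado.ioloop
    import tornado.locks
    import zmq
    import zmq.eventloop.future

    async def main():
        ctx = zmq.eventloop.future.Context()
        sock = ctx.socket(zmq.REQ)
        sock.connect("tcp://127.0.0.1:5555")
        lock = tornado.locks.Lock()

        async def send_recv(msg):
            # REQ sockets demand strict send/recv alternation; the lock
            # keeps concurrent coroutines from interleaving their pairs.
            async with lock:
                await sock.send(msg)
                return await sock.recv()

        print(await send_recv(b"ping"))
        sock.close(0)
        ctx.term()

    tornado.ioloop.IOLoop.current().run_sync(main)
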
@@ -881,7 +859,7 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         run in a thread or process as it creates and runs an it's own ioloop.
         """
         ioloop = tornado.ioloop.IOLoop()
-        ioloop.add_callback(self.publisher, publish_payload)
+        ioloop.add_callback(self.publisher, publish_payload, ioloop=ioloop)
         try:
             ioloop.start()
         finally:

@@ -936,7 +914,6 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
     async def publisher(self, publish_payload, ioloop=None):
         if ioloop is None:
             ioloop = tornado.ioloop.IOLoop.current()
-            ioloop.asyncio_loop.set_debug(True)
         self.daemon_context = zmq.asyncio.Context()
         (
             self.daemon_pull_sock,

@@ -1023,13 +1000,14 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
             ctx = self.ctx
             self.ctx = None
             ctx.term()
-        if self.daemon_monitor:
-            self.daemon_monitor.stop()
         if self.daemon_pub_sock:
             self.daemon_pub_sock.close()
         if self.daemon_pull_sock:
             self.daemon_pull_sock.close()
+        if self.daemon_monitor:
+            self.daemon_monitor.stop()
         if self.daemon_context:
             self.daemon_context.destroy(1)
+            self.daemon_context.term()

     async def publish(self, payload, **kwargs):

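The reordering in close() above matters because a zmq context will not terminate while sockets created from it remain open: sockets (and the monitor reading from one of them) have to be closed first. A minimal illustration outside of Salt:

    import zmq

    ctx = zmq.Context()
    sock = ctx.socket(zmq.PUSH)
    sock.close()  # close every socket first...
    ctx.term()    # ...so terminating the context cannot block
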
@@ -19,6 +19,8 @@
     http://stackoverflow.com/questions/6190331/
 """

+# pragma: no cover # essentially using Python's OrderDict
+

 from collections.abc import Callable

@@ -72,9 +74,7 @@ except (ImportError, AttributeError):
             """
             super().__init__()
             if len(args) > 1:
-                raise TypeError(
-                    "expected at most 1 arguments, got {}".format(len(args))
-                )
+                raise TypeError(f"expected at most 1 arguments, got {len(args)}")
             try:
                 self.__root
             except AttributeError:

@@ -248,7 +248,7 @@ except (ImportError, AttributeError):
             _repr_running[call_key] = 1
             try:
                 if not self:
-                    return "{}()".format(self.__class__.__name__)
+                    return f"{self.__class__.__name__}()"
                 return "{}('{}')".format(
                     self.__class__.__name__, list(self.items())
                 )

@@ -162,6 +162,7 @@ def test_pub_server_channel(
         log.debug("Payload handler got %r", payload)

     req_server_channel.post_fork(handle_payload, io_loop=io_loop)
+
     if master_config["transport"] == "zeromq":
         p = Path(str(master_config["sock_dir"])) / "workers.ipc"
         start = time.time()

@@ -0,0 +1,63 @@
+import asyncio
+import multiprocessing
+import socket
+import threading
+import time
+
+import pytest
+
+import salt.transport.tcp
+
+pytestmark = [
+    pytest.mark.core_test,
+]
+
+
+def test_tcp_load_balancer_server(master_opts, io_loop):
+
+    messages = []
+
+    def handler(stream, message, header):
+        messages.append(message)
+
+    queue = multiprocessing.Queue()
+    server = salt.transport.tcp.LoadBalancerServer(master_opts, queue)
+    worker = salt.transport.tcp.LoadBalancerWorker(queue, handler, io_loop=io_loop)
+
+    def run_loop():
+        try:
+            io_loop.start()
+        except Exception as exc:
+            print(f"Caught exeption {exc}")
+
+    thread = threading.Thread(target=server.run)
+    thread.start()
+
+    # Wait for bind to happen.
+    time.sleep(0.5)
+
+    package = {"foo": "bar"}
+    payload = salt.transport.frame.frame_msg(package)
+    sock = socket.socket()
+    sock.connect(("127.0.0.1", master_opts["ret_port"]))
+    sock.send(payload)
+
+    start = time.monotonic()
+
+    async def check_test():
+        while not messages:
+            await asyncio.sleep(0.3)
+            if time.monotonic() - start > 30:
+                break
+
+    io_loop.run_sync(lambda: check_test())
+
+    try:
+        if time.monotonic() - start > 30:
+            assert False, "Took longer than 30 seconds to receive message"
+
+        assert [package] == messages
+    finally:
+        server.close()
+        thread.join()
+        worker.close()

tests/pytests/functional/transport/tcp/test_pub_server.py (new file, 66 lines)

@@ -0,0 +1,66 @@
+import os
+import threading
+import time
+
+import tornado.gen
+
+import salt.transport.tcp
+
+
+async def test_pub_channel(master_opts, minion_opts, io_loop):
+    def presence_callback(client):
+        pass
+
+    def remove_presence_callback(client):
+        pass
+
+    master_opts["transport"] = "tcp"
+    minion_opts.update(master_ip="127.0.0.1", transport="tcp")
+
+    server = salt.transport.tcp.TCPPublishServer(
+        master_opts,
+        pub_host="127.0.0.1",
+        pub_port=master_opts["publish_port"],
+        pull_path=os.path.join(master_opts["sock_dir"], "publish_pull.ipc"),
+    )
+
+    client = salt.transport.tcp.TCPPubClient(
+        minion_opts,
+        io_loop,
+        host="127.0.0.1",
+        port=master_opts["publish_port"],
+    )
+
+    payloads = []
+
+    publishes = []
+
+    async def publish_payload(payload, callback):
+        await server.publish_payload(payload)
+        payloads.append(payload)
+
+    async def on_recv(message):
+        publishes.append(message)
+
+    io_loop.add_callback(
+        server.publisher, publish_payload, presence_callback, remove_presence_callback
+    )
+
+    # Wait for socket to bind.
+    await tornado.gen.sleep(3)
+
+    await client.connect(master_opts["publish_port"])
+    client.on_recv(on_recv)
+
+    await server.publish({"meh": "bah"})
+
+    start = time.monotonic()
+    try:
+        while not publishes:
+            await tornado.gen.sleep(0.3)
+            if time.monotonic() - start > 30:
+                assert False, "Message not published after 30 seconds"
+    finally:
+        server.close()
+        server.pub_server.close()
+        client.close()

@@ -1,7 +1,11 @@
+import asyncio
 import logging
+import os
+import time

 import pytest

 import salt.transport.zeromq
 from tests.support.mock import MagicMock, patch
+from tests.support.pytest.transport import PubServerChannelProcess

@@ -51,3 +55,112 @@ def test_zeromq_filtering(salt_master, salt_minion):
     assert len(results) == send_num, "{} != {}, difference: {}".format(
         len(results), send_num, set(expect).difference(results)
     )
+
+
+async def test_pub_channel(master_opts, io_loop):
+
+    server = salt.transport.zeromq.PublishServer(
+        master_opts,
+        pub_host="127.0.0.1",
+        pub_port=4506,
+        pull_path=os.path.join(master_opts["sock_dir"], "publish_pull.ipc"),
+    )
+
+    payloads = []
+
+    async def publish_payload(payload):
+        await server.publish_payload(payload)
+        payloads.append(payload)
+
+    io_loop.add_callback(
+        server.publisher,
+        publish_payload,
+        ioloop=io_loop,
+    )
+
+    await asyncio.sleep(3)
+
+    await server.publish(salt.payload.dumps({"meh": "bah"}))
+
+    start = time.monotonic()
+
+    try:
+        while not payloads:
+            await asyncio.sleep(0.3)
+            if time.monotonic() - start > 30:
+                assert False, "No message received after 30 seconds"
+        assert payloads
+    finally:
+        server.close()
+
+
+async def test_pub_channel_filtering(master_opts, io_loop):
+    master_opts["zmq_filtering"] = True
+
+    server = salt.transport.zeromq.PublishServer(
+        master_opts,
+        pub_host="127.0.0.1",
+        pub_port=4506,
+        pull_path=os.path.join(master_opts["sock_dir"], "publish_pull.ipc"),
+    )
+
+    payloads = []
+
+    async def publish_payload(payload):
+        await server.publish_payload(payload)
+        payloads.append(payload)
+
+    io_loop.add_callback(
+        server.publisher,
+        publish_payload,
+        ioloop=io_loop,
+    )
+
+    await asyncio.sleep(3)
+
+    await server.publish(salt.payload.dumps({"meh": "bah"}))
+
+    start = time.monotonic()
+    try:
+        while not payloads:
+            await asyncio.sleep(0.3)
+            if time.monotonic() - start > 30:
+                assert False, "No message received after 30 seconds"
+    finally:
+        server.close()
+
+
+async def test_pub_channel_filtering_topic(master_opts, io_loop):
+    master_opts["zmq_filtering"] = True
+
+    server = salt.transport.zeromq.PublishServer(
+        master_opts,
+        pub_host="127.0.0.1",
+        pub_port=4506,
+        pull_path=os.path.join(master_opts["sock_dir"], "publish_pull.ipc"),
+    )
+
+    payloads = []
+
+    async def publish_payload(payload):
+        await server.publish_payload(payload, topic_list=["meh"])
+        payloads.append(payload)
+
+    io_loop.add_callback(
+        server.publisher,
+        publish_payload,
+        ioloop=io_loop,
+    )
+
+    await asyncio.sleep(3)
+
+    await server.publish(salt.payload.dumps({"meh": "bah"}))
+
+    start = time.monotonic()
+    try:
+        while not payloads:
+            await asyncio.sleep(0.3)
+            if time.monotonic() - start > 30:
+                assert False, "No message received after 30 seconds"
+    finally:
+        server.close()

@@ -1,10 +1,15 @@
+import asyncio
 import os
 import socket
+import warnings

 import attr
 import pytest
+import tornado
+import tornado.concurrent
 import tornado.gen
 import tornado.ioloop
 import tornado.iostream
+from pytestshellutils.utils import ports

 import salt.channel.server

@@ -18,7 +23,7 @@ pytestmark = [


 @pytest.fixture
-def fake_keys():
+def _fake_keys():
     with patch("salt.crypt.AsyncAuth.get_keys", autospec=True):
         yield

@@ -30,7 +35,7 @@ def fake_crypto():


 @pytest.fixture
-def fake_authd(io_loop):
+def _fake_authd(io_loop):
     @tornado.gen.coroutine
     def return_nothing():
         raise tornado.gen.Return()

@@ -49,12 +54,24 @@ def fake_authd(io_loop):


 @pytest.fixture
-def fake_crypticle():
+def _fake_crypticle():
     with patch("salt.crypt.Crypticle") as fake_crypticle:
         fake_crypticle.generate_key_string.return_value = "fakey fake"
         yield fake_crypticle


+@pytest.fixture
+def _squash_exepected_message_client_warning():
+    with warnings.catch_warnings():
+        warnings.filterwarnings(
+            "ignore",
+            message="MessageClient has been deprecated and will be removed.",
+            category=DeprecationWarning,
+            module="salt.transport.tcp",
+        )
+        yield
+
+
 @attr.s(frozen=True, slots=True)
 class ClientSocket:
     listen_on = attr.ib(init=False, default="127.0.0.1")

@@ -81,47 +98,35 @@ def client_socket():
     yield _client_socket


-async def test_message_client_cleanup_on_close(
-    client_socket, temp_salt_master, io_loop
-):
+@pytest.mark.usefixtures("_squash_exepected_message_client_warning")
+async def test_message_client_cleanup_on_close(client_socket, temp_salt_master):
     """
     test message client cleanup on close
     """
-    orig_loop = io_loop
-
     opts = dict(temp_salt_master.config.copy(), transport="tcp")
     client = salt.transport.tcp.MessageClient(
         opts, client_socket.listen_on, client_socket.port
     )
+    assert client._closed is False
+    assert client._closing is False
+    assert client._stream is None

-    # Mock the io_loop's stop method so we know when it has been called.
-    orig_loop.real_stop = orig_loop.stop
-    orig_loop.stop_called = False
-
-    def stop(*args, **kwargs):
-        orig_loop.stop_called = True
-        orig_loop.real_stop()
+    await client.connect()

-    orig_loop.stop = stop
-    try:
-        assert client.io_loop == orig_loop
-        await client.connect()
-
-        # Ensure we are testing the _read_until_future and io_loop teardown
-        assert client._stream is not None
-
-        # The run_sync call will set stop_called, reset it
-        # orig_loop.stop_called = False
-        client.close()
-
-        # Stop should be called again, client's io_loop should be None
-        # assert orig_loop.stop_called is True
-        # assert client.io_loop is None
-    finally:
-        orig_loop.stop = orig_loop.real_stop
-        del orig_loop.real_stop
-        del orig_loop.stop_called
+    # Ensure we are testing the _read_until_future and io_loop teardown
+    assert client._stream is not None
+
+    client.close()
+    assert client._closed is False
+    assert client._closing is True
+    assert client._stream is not None
+    await asyncio.sleep(0.1)
+    assert client._closed is True
+    assert client._closing is False
+    assert client._stream is None


 async def test_async_tcp_pub_channel_connect_publish_port(

@@ -156,6 +161,7 @@ async def test_async_tcp_pub_channel_connect_publish_port(
     # The first call to the mock is the instance's __init__, and the first argument to those calls is the opts dict
     await asyncio.sleep(0.3)
     assert channel.transport.connect.call_args[0][0] == opts["publish_port"]
+    transport.close()


 def test_tcp_pub_server_channel_publish_filtering(temp_salt_master):

@@ -253,7 +259,10 @@ def salt_message_client():
         {}, "127.0.0.1", ports.get_unused_localhost_port(), io_loop=io_loop_mock
     )

-    yield client
+    try:
+        yield client
+    finally:
+        client.close()


 # XXX we don't reutnr a future anymore, this needs a different way of testing.

@@ -341,6 +350,7 @@ def salt_message_client():
     # assert message_id_2 not in salt_message_client.send_timeout_map


+@pytest.mark.usefixtures("_squash_exepected_message_client_warning")
 def test_timeout_message_unknown_future(salt_message_client):
     # # test we don't fail on unknown message_id
     # salt_message_client.timeout_message(-1, "message")

@@ -358,6 +368,7 @@ def test_timeout_message_unknown_future(salt_message_client):
     assert message_id not in salt_message_client.send_future_map


+@pytest.mark.usefixtures("_squash_exepected_message_client_warning")
 def xtest_client_reconnect_backoff(client_socket):
     opts = {"tcp_reconnect_backoff": 5}

@@ -384,8 +395,9 @@ def xtest_client_reconnect_backoff(client_socket):
         client.close()


+@pytest.mark.usefixtures("_fake_crypticle", "_fake_keys")
 async def test_when_async_req_channel_with_syndic_role_should_use_syndic_master_pub_file_to_verify_master_sig(
-    fake_keys, fake_crypto, fake_crypticle
+    fake_crypto,
 ):
     # Syndics use the minion pki dir, but they also create a syndic_master.pub
     # file for comms with the Salt master

@@ -413,9 +425,8 @@ async def test_when_async_req_channel_with_syndic_role_should_use_syndic_master_
     assert mock.call_args_list[0][0][0] == expected_pubkey_path


-async def test_mixin_should_use_correct_path_when_syndic(
-    fake_keys, fake_authd, fake_crypticle
-):
+@pytest.mark.usefixtures("_fake_authd", "_fake_crypticle", "_fake_keys")
+async def test_mixin_should_use_correct_path_when_syndic():
     mockloop = MagicMock()
     expected_pubkey_path = os.path.join("/etc/salt/pki/minion", "syndic_master.pub")
     opts = {

@@ -439,6 +450,7 @@ async def test_mixin_should_use_correct_path_when_syndic(
     assert mock.call_args_list[0][0][0] == expected_pubkey_path


+@pytest.mark.usefixtures("_squash_exepected_message_client_warning")
 def test_presence_events_callback_passed(temp_salt_master, salt_message_client):
     opts = dict(temp_salt_master.config.copy(), transport="tcp", presence_events=True)
     channel = salt.channel.server.PubServerChannel.factory(opts)

@@ -483,3 +495,188 @@ async def test_presence_removed_on_stream_closed():
     await server.publish_payload(package, None)

     server.remove_presence_callback.assert_called_with(client)
+
+
+async def test_tcp_pub_client_decode_dict(minion_opts, io_loop, tmp_path):
+    dmsg = {"meh": "bah"}
+    with salt.transport.tcp.TCPPubClient(minion_opts, io_loop, path=tmp_path) as client:
+        ret = client._decode_messages(dmsg)
+    assert ret == dmsg
+
+
+async def test_tcp_pub_client_decode_msgpack(minion_opts, io_loop, tmp_path):
+    dmsg = {"meh": "bah"}
+    msg = salt.payload.dumps(dmsg)
+    with salt.transport.tcp.TCPPubClient(minion_opts, io_loop, path=tmp_path) as client:
+        ret = client._decode_messages(msg)
+    assert ret == dmsg
+
+
+def test_tcp_pub_client_close(minion_opts, io_loop, tmp_path):
+    client = salt.transport.tcp.TCPPubClient(minion_opts, io_loop, path=tmp_path)
+
+    stream = MagicMock()
+
+    client._stream = stream
+    client.close()
+    assert client._closing is True
+    assert client._stream is None
+    client.close()
+    stream.close.assert_called_once_with()
+
+
+async def test_pub_server__stream_read(master_opts, io_loop):
+
+    messages = [salt.transport.frame.frame_msg({"foo": "bar"})]
+
+    class Stream:
+        def __init__(self, messages):
+            self.messages = messages
+
+        def read_bytes(self, *args, **kwargs):
+            if self.messages:
+                msg = self.messages.pop(0)
+                future = tornado.concurrent.Future()
+                future.set_result(msg)
+                return future
+            raise tornado.iostream.StreamClosedError()
+
+    client = MagicMock()
+    client.stream = Stream(messages)
+    client.address = "client address"
+    server = salt.transport.tcp.PubServer(master_opts, io_loop)
+    await server._stream_read(client)
+    client.close.assert_called_once()
+
+
+async def test_pub_server__stream_read_exception(master_opts, io_loop):
+    client = MagicMock()
+    client.stream = MagicMock()
+    client.stream.read_bytes = MagicMock(
+        side_effect=[
+            Exception("Something went wrong"),
+            tornado.iostream.StreamClosedError(),
+        ]
+    )
+    client.address = "client address"
+    server = salt.transport.tcp.PubServer(master_opts, io_loop)
+    await server._stream_read(client)
+    client.close.assert_called_once()
+
+
+async def test_salt_message_server(master_opts):
+
+    received = []
+
+    def handler(stream, body, header):
+
+        received.append(body)
+
+    server = salt.transport.tcp.SaltMessageServer(handler)
+    msg = {"foo": "bar"}
+    messages = [salt.transport.frame.frame_msg(msg)]
+
+    class Stream:
+        def __init__(self, messages):
+            self.messages = messages
+
+        def read_bytes(self, *args, **kwargs):
+            if self.messages:
+                msg = self.messages.pop(0)
+                future = tornado.concurrent.Future()
+                future.set_result(msg)
+                return future
+            raise tornado.iostream.StreamClosedError()
+
+    stream = Stream(messages)
+    address = "client address"
+
+    await server.handle_stream(stream, address)
+
+    # Let loop iterate so callback gets called
+    await tornado.gen.sleep(0.01)
+
+    assert received
+    assert [msg] == received
+
+
+async def test_salt_message_server_exception(master_opts, io_loop):
+    received = []
+
+    def handler(stream, body, header):
+
+        received.append(body)
+
+    stream = MagicMock()
+    stream.read_bytes = MagicMock(
+        side_effect=[
+            Exception("Something went wrong"),
+        ]
+    )
+    address = "client address"
+    server = salt.transport.tcp.SaltMessageServer(handler)
+    await server.handle_stream(stream, address)
+    stream.close.assert_called_once()
+
+
+@pytest.mark.usefixtures("_squash_exepected_message_client_warning")
+async def test_message_client_stream_return_exception(minion_opts, io_loop):
+    msg = {"foo": "bar"}
+    payload = salt.transport.frame.frame_msg(msg)
+    future = tornado.concurrent.Future()
+    future.set_result(payload)
+    client = salt.transport.tcp.MessageClient(
+        minion_opts,
+        "127.0.0.1",
+        12345,
+        connect_callback=MagicMock(),
+        disconnect_callback=MagicMock(),
+    )
+    client._stream = MagicMock()
+    client._stream.read_bytes.side_effect = [
+        future,
+    ]
+    try:
+        io_loop.add_callback(client._stream_return)
+        await tornado.gen.sleep(0.01)
+        client.close()
+        await tornado.gen.sleep(0.01)
+        assert client._stream is None
+    finally:
+        client.close()
+
+
+def test_tcp_pub_server_pre_fork(master_opts):
+    process_manager = MagicMock()
+    server = salt.transport.tcp.TCPPublishServer(master_opts)
+    server.pre_fork(process_manager)
+
+
+async def test_pub_server_publish_payload(master_opts, io_loop):
+    server = salt.transport.tcp.PubServer(master_opts, io_loop=io_loop)
+    package = {"foo": "bar"}
+    topic_list = ["meh"]
+    future = tornado.concurrent.Future()
+    future.set_result(None)
+    client = MagicMock()
+    client.stream = MagicMock()
+    client.stream.write.side_effect = [future]
+    client.id_ = "meh"
+    server.clients = [client]
+    await server.publish_payload(package, topic_list)
+    client.stream.write.assert_called_once()
+
+
+async def test_pub_server_publish_payload_closed_stream(master_opts, io_loop):
+    server = salt.transport.tcp.PubServer(master_opts, io_loop=io_loop)
+    package = {"foo": "bar"}
+    topic_list = ["meh"]
+    client = MagicMock()
+    client.stream = MagicMock()
+    client.stream.write.side_effect = [
+        tornado.iostream.StreamClosedError("mock"),
+    ]
+    client.id_ = "meh"
+    server.clients = {client}
+    await server.publish_payload(package, topic_list)
+    assert server.clients == set()

@@ -1,10 +1,8 @@
 import msgpack
 import pytest
-import tornado.concurrent

 import salt.config
 import salt.transport.zeromq
-from tests.support.mock import MagicMock


 async def test_req_server_garbage_request(io_loop):

@@ -37,20 +35,21 @@ async def test_client_timeout_msg(minion_opts):
     client = salt.transport.zeromq.AsyncReqMessageClient(
         minion_opts, "tcp://127.0.0.1:4506"
     )
-    assert hasattr(client, "_future")
-    assert client._future is None
-    future = tornado.concurrent.Future()
-    client._future = future
-    client.timeout_message(future)
-    with pytest.raises(salt.exceptions.SaltReqTimeoutError):
-        await future
-    assert client._future is None
+    client.connect()
+    try:
+        with pytest.raises(salt.exceptions.SaltReqTimeoutError):
+            await client.send({"meh": "bah"}, 1)
+    finally:
+        client.close()

-    future_a = tornado.concurrent.Future()
-    future_b = tornado.concurrent.Future()
-    future_b.set_exception = MagicMock()
-    client._future = future_a
-    client.timeout_message(future_b)
-
-    assert client._future == future_a
-    future_b.set_exception.assert_not_called()
+
+def test_pub_client_init(minion_opts, io_loop):
+    minion_opts["id"] = "minion"
+    minion_opts["__role"] = "syndic"
+    minion_opts["master_ip"] = "127.0.0.1"
+    minion_opts["zmq_filtering"] = True
+    minion_opts["zmq_monitor"] = True
+    with salt.transport.zeromq.PublishClient(
+        minion_opts, io_loop, host=minion_opts["master_ip"], port=121212
+    ) as client:
+        client.send(b"asf")

(File diff suppressed because it is too large.)
tests/pytests/unit/utils/test_listdiffer.py (new file, 180 lines)

@@ -0,0 +1,180 @@
+import pytest
+
+from salt.utils.dictdiffer import RecursiveDictDiffer
+from salt.utils.listdiffer import list_diff
+
+
+@pytest.fixture
+def get_old_list():
+    return [
+        {"key": 1, "value": "foo1", "int_value": 101},
+        {"key": 2, "value": "foo2", "int_value": 102},
+        {"key": 3, "value": "foo3", "int_value": 103},
+    ]
+
+
+@pytest.fixture
+def get_new_list():
+    return [
+        {"key": 1, "value": "foo1", "int_value": 101},
+        {"key": 2, "value": "foo2", "int_value": 112},
+        {"key": 5, "value": "foo5", "int_value": 105},
+    ]
+
+
+@pytest.fixture
+def get_list_diff(get_old_list, get_new_list):
+    return list_diff(get_old_list, get_new_list, key="key")
+
+
+def test_added(get_list_diff):
+    assert len(get_list_diff.added) == 1
+    assert get_list_diff.added[0] == {"key": 5, "value": "foo5", "int_value": 105}
+
+
+def test_removed(get_list_diff):
+    assert len(get_list_diff.removed) == 1
+    assert get_list_diff.removed[0] == {"key": 3, "value": "foo3", "int_value": 103}
+
+
+def test_diffs(get_list_diff):
+    assert len(get_list_diff.diffs) == 3
+    assert get_list_diff.diffs[0] == {2: {"int_value": {"new": 112, "old": 102}}}
+
+    # Added items
+    assert get_list_diff.diffs[1] == {
+        5: {
+            "int_value": {"new": 105, "old": RecursiveDictDiffer.NONE_VALUE},
+            "key": {"new": 5, "old": RecursiveDictDiffer.NONE_VALUE},
+            "value": {"new": "foo5", "old": RecursiveDictDiffer.NONE_VALUE},
+        }
+    }
+
+    # Removed items
+    assert get_list_diff.diffs[2] == {
+        3: {
+            "int_value": {"new": RecursiveDictDiffer.NONE_VALUE, "old": 103},
+            "key": {"new": RecursiveDictDiffer.NONE_VALUE, "old": 3},
+            "value": {"new": RecursiveDictDiffer.NONE_VALUE, "old": "foo3"},
+        }
+    }
+
+
+def test_new_values(get_list_diff):
+    assert len(get_list_diff.new_values) == 2
+    assert get_list_diff.new_values[0] == {"key": 2, "int_value": 112}
+    assert get_list_diff.new_values[1] == {"key": 5, "value": "foo5", "int_value": 105}
+
+
+def test_old_values(get_list_diff):
+    assert len(get_list_diff.old_values) == 2
+    assert get_list_diff.old_values[0] == {"key": 2, "int_value": 102}
+    assert get_list_diff.old_values[1] == {"key": 3, "value": "foo3", "int_value": 103}
+
+
+def test_changed_all(get_list_diff):
+    assert get_list_diff.changed(selection="all") == [
+        "key.2.int_value",
+        "key.5.int_value",
+        "key.5.value",
+        "key.3.int_value",
+        "key.3.value",
+    ]
+
+
+def test_changed_intersect(get_list_diff):
+    assert get_list_diff.changed(selection="intersect") == ["key.2.int_value"]
+
+
+def test_changes_str(get_list_diff):
+    expected = """\tidentified by key 2:
+\tint_value from 102 to 112
+\tidentified by key 3:
+\twill be removed
+\tidentified by key 5:
+\twill be added
+"""
+    assert get_list_diff.changes_str == expected
+
+
+def test_intersect(get_list_diff):
+    expected = [
+        {
+            "key": 1,
+            "old": {"key": 1, "value": "foo1", "int_value": 101},
+            "new": {"key": 1, "value": "foo1", "int_value": 101},
+        },
+        {
+            "key": 2,
+            "old": {"key": 2, "value": "foo2", "int_value": 102},
+            "new": {"key": 2, "value": "foo2", "int_value": 112},
+        },
+    ]
+    test_isect = get_list_diff.intersect
+    assert test_isect == expected
+
+
+def test_remove_diff_intersect(get_list_diff):
+    expected = [
+        {
+            "key": 1,
+            "old": {"key": 1, "int_value": 101},
+            "new": {"key": 1, "int_value": 101},
+        },
+        {
+            "key": 2,
+            "old": {"key": 2, "int_value": 102},
+            "new": {"key": 2, "int_value": 112},
+        },
+    ]
+
+    get_list_diff.remove_diff(diff_key="value")
+    test_isect = get_list_diff.intersect
+    assert test_isect == expected
+
+
+def test_remove_diff_removed(get_list_diff):
+    expected = [
+        {
+            "key": 1,
+            "old": {"key": 1, "value": "foo1", "int_value": 101},
+            "new": {"key": 1, "value": "foo1", "int_value": 101},
+        },
+        {
+            "key": 2,
+            "old": {"key": 2, "value": "foo2", "int_value": 102},
+            "new": {"key": 2, "value": "foo2", "int_value": 112},
+        },
+    ]
+    get_list_diff.remove_diff(diff_key="value", diff_list="removed")
+    test_isect = get_list_diff.intersect
+    assert test_isect == expected
+
+
+def test_changes_str2(get_list_diff):
+    expected = """  key=2 (updated):
+    int_value from 102 to 112
+  key=3 (removed)
+  key=5 (added): {'key': 5, 'value': 'foo5', 'int_value': 105}"""
+    test_changes = get_list_diff.changes_str2
+    assert test_changes == expected
+
+
+def test_current_list(get_list_diff):
+    expected = [
+        {"key": 1, "value": "foo1", "int_value": 101},
+        {"key": 2, "value": "foo2", "int_value": 102},
+        {"key": 3, "value": "foo3", "int_value": 103},
+    ]
+    test_curr_list = get_list_diff.current_list
+    assert test_curr_list == expected
+
+
+def test_new_list(get_list_diff):
+    expected = [
+        {"key": 1, "value": "foo1", "int_value": 101},
+        {"key": 2, "value": "foo2", "int_value": 112},
+        {"key": 5, "value": "foo5", "int_value": 105},
+    ]
+    test_new_list = get_list_diff.new_list
+    assert test_new_list == expected

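For orientation, list_diff pairs the items of the two lists by the given key and exposes added, removed and changed views, which is what the fixtures above exercise. A minimal usage sketch (the sample dicts here are illustrative):

    from salt.utils.listdiffer import list_diff

    old = [{"key": 1, "value": "foo1"}, {"key": 2, "value": "foo2"}]
    new = [{"key": 1, "value": "foo1"}, {"key": 2, "value": "bar2"}]

    diff = list_diff(old, new, key="key")
    print(diff.changed(selection="all"))  # e.g. ["key.2.value"]
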
(File diff suppressed because it is too large.)
@@ -1,109 +0,0 @@
-from salt.utils import dictdiffer
-from salt.utils.listdiffer import list_diff
-from tests.support.unit import TestCase
-
-NONE = dictdiffer.RecursiveDictDiffer.NONE_VALUE
-
-
-class ListDictDifferTestCase(TestCase):
-    def setUp(self):
-        old_list = [
-            {"key": 1, "value": "foo1", "int_value": 101},
-            {"key": 2, "value": "foo2", "int_value": 102},
-            {"key": 3, "value": "foo3", "int_value": 103},
-        ]
-        new_list = [
-            {"key": 1, "value": "foo1", "int_value": 101},
-            {"key": 2, "value": "foo2", "int_value": 112},
-            {"key": 5, "value": "foo5", "int_value": 105},
-        ]
-        self.list_diff = list_diff(old_list, new_list, key="key")
-
-    def tearDown(self):
-        for attrname in ("list_diff",):
-            try:
-                delattr(self, attrname)
-            except AttributeError:
-                continue
-
-    def test_added(self):
-        self.assertEqual(len(self.list_diff.added), 1)
-        self.assertDictEqual(
-            self.list_diff.added[0], {"key": 5, "value": "foo5", "int_value": 105}
-        )
-
-    def test_removed(self):
-        self.assertEqual(len(self.list_diff.removed), 1)
-        self.assertDictEqual(
-            self.list_diff.removed[0], {"key": 3, "value": "foo3", "int_value": 103}
-        )
-
-    def test_diffs(self):
-        self.assertEqual(len(self.list_diff.diffs), 3)
-        self.assertDictEqual(
-            self.list_diff.diffs[0], {2: {"int_value": {"new": 112, "old": 102}}}
-        )
-        self.assertDictEqual(
-            self.list_diff.diffs[1],
-            # Added items
-            {
-                5: {
-                    "int_value": {"new": 105, "old": NONE},
-                    "key": {"new": 5, "old": NONE},
-                    "value": {"new": "foo5", "old": NONE},
-                }
-            },
-        )
-        self.assertDictEqual(
-            self.list_diff.diffs[2],
-            # Removed items
-            {
-                3: {
-                    "int_value": {"new": NONE, "old": 103},
-                    "key": {"new": NONE, "old": 3},
-                    "value": {"new": NONE, "old": "foo3"},
-                }
-            },
-        )
-
-    def test_new_values(self):
-        self.assertEqual(len(self.list_diff.new_values), 2)
-        self.assertDictEqual(self.list_diff.new_values[0], {"key": 2, "int_value": 112})
-        self.assertDictEqual(
-            self.list_diff.new_values[1], {"key": 5, "value": "foo5", "int_value": 105}
-        )
-
-    def test_old_values(self):
-        self.assertEqual(len(self.list_diff.old_values), 2)
-        self.assertDictEqual(self.list_diff.old_values[0], {"key": 2, "int_value": 102})
-        self.assertDictEqual(
-            self.list_diff.old_values[1], {"key": 3, "value": "foo3", "int_value": 103}
-        )
-
-    def test_changed_all(self):
-        self.assertEqual(
-            self.list_diff.changed(selection="all"),
-            [
-                "key.2.int_value",
-                "key.5.int_value",
-                "key.5.value",
-                "key.3.int_value",
-                "key.3.value",
-            ],
-        )
-
-    def test_changed_intersect(self):
-        self.assertEqual(
-            self.list_diff.changed(selection="intersect"), ["key.2.int_value"]
-        )
-
-    def test_changes_str(self):
-        self.assertEqual(
-            self.list_diff.changes_str,
-            "\tidentified by key 2:\n"
-            "\tint_value from 102 to 112\n"
-            "\tidentified by key 3:\n"
-            "\twill be removed\n"
-            "\tidentified by key 5:\n"
-            "\twill be added\n",
-        )

20
tools/ci.py
@@ -18,6 +18,7 @@ import yaml
from ptscripts import Context, command_group

import tools.utils
+import tools.utils.gh

if sys.version_info < (3, 11):
    from typing_extensions import NotRequired, TypedDict
@@ -622,9 +623,12 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
        "full": {
            "help": "Full test run",
        },
+        "workflow": {
+            "help": "Which workflow is running",
+        },
    },
)
-def matrix(ctx: Context, distro_slug: str, full: bool = False):
+def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = "ci"):
    """
    Generate the test matrix.
    """
@@ -635,6 +639,15 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False):
        "scenarios": 1,
        "unit": 2,
    }
+    # On nightly and scheduled builds we don't want splits at all
+    if workflow.lower() in ("nightly", "scheduled"):
+        ctx.info(f"Reducing splits definition since workflow is '{workflow}'")
+        for key in _splits:
+            new_value = _splits[key] - 2
+            if new_value < 1:
+                new_value = 1
+            _splits[key] = new_value
+
    for transport in ("zeromq", "tcp"):
        if transport == "tcp":
            if distro_slug not in (
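Note: the reduction above lowers every split count by two, clamped at a minimum of one, so nightly and scheduled runs use fewer, larger test chunks. A standalone sketch of that rule (the helper name is hypothetical; only the arithmetic comes from the hunk):

def reduce_splits(splits: dict[str, int]) -> dict[str, int]:
    # Drop each chunk count by 2, but never below 1.
    return {key: max(value - 2, 1) for key, value in splits.items()}


# With the splits visible in this hunk, "unit" shrinks and "scenarios" stays put:
assert reduce_splits({"scenarios": 1, "unit": 2}) == {"scenarios": 1, "unit": 1}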
@@ -964,8 +977,9 @@ def _get_pr_test_labels_from_api(
        headers = {
            "Accept": "application/vnd.github+json",
        }
-        if "GITHUB_TOKEN" in os.environ:
-            headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}"
+        github_token = tools.utils.gh.get_github_token(ctx)
+        if github_token is not None:
+            headers["Authorization"] = f"Bearer {github_token}"
        web.headers.update(headers)
        ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}")
        if ret.status_code != 200:
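Note: the point of this change, and of the matching one in get_salt_releases below, is that the Authorization header is only attached when a token was actually resolved, so requests still work unauthenticated, just under GitHub's lower anonymous rate limit. The pattern in isolation (the function name is hypothetical):

def build_github_headers(token: str | None) -> dict[str, str]:
    # Always request the GitHub JSON media type.
    headers = {"Accept": "application/vnd.github+json"}
    # Attach auth only when a token is actually available.
    if token is not None:
        headers["Authorization"] = f"Bearer {token}"
    return headers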
@@ -467,7 +467,12 @@ def github(
    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"release-messsage-file={release_message_path.resolve()}\n")

-    releases = get_salt_releases(ctx, repository)
+    try:
+        releases = get_salt_releases(ctx, repository)
+    except SystemExit:
+        ctx.warn(f"Failed to get salt releases from repository '{repository}'")
+        releases = get_salt_releases(ctx, "saltstack/salt")

    if Version(salt_version) >= releases[-1]:
        make_latest = True
    else:
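Note: get_salt_releases exits via SystemExit when the tag listing fails (see its status-code check below), so the wrapper above catches that and retries against the canonical saltstack/salt repository instead of aborting the release job. A generic sketch of the fallback pattern; fetch_releases and its return values are hypothetical stand-ins:

def fetch_releases(repository: str) -> list[str]:
    # Stand-in for get_salt_releases(); pretend only the canonical repo answers.
    if repository != "saltstack/salt":
        raise SystemExit(1)
    return ["3006.3", "3006.4"]


def releases_with_fallback(repository: str) -> list[str]:
    try:
        return fetch_releases(repository)
    except SystemExit:
        # Retry against the canonical upstream rather than aborting.
        return fetch_releases("saltstack/salt")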
@@ -137,13 +137,19 @@ def get_salt_releases(ctx: Context, repository: str) -> list[Version]:
    """
    Return a list of salt versions
    """
+    # Deferred import
+    import tools.utils.gh
+
+    ctx.info(f"Collecting salt releases from repository '{repository}'")
+
    versions = set()
    with ctx.web as web:
        headers = {
            "Accept": "application/vnd.github+json",
        }
-        if "GITHUB_TOKEN" in os.environ:
-            headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}"
+        github_token = tools.utils.gh.get_github_token(ctx)
+        if github_token is not None:
+            headers["Authorization"] = f"Bearer {github_token}"
        web.headers.update(headers)
        ret = web.get(f"https://api.github.com/repos/{repository}/tags")
        if ret.status_code != 200:
@@ -218,11 +218,20 @@ def get_github_token(ctx: Context) -> str | None:
    Get the GITHUB_TOKEN to be able to authenticate to the API.
    """
    github_token = os.environ.get("GITHUB_TOKEN")
-    if github_token is None:
-        gh = shutil.which("gh")
-        ret = ctx.run(gh, "auth", "token", check=False, capture=True)
-        if ret.returncode == 0:
-            github_token = ret.stdout.decode().strip() or None
+    if github_token is not None:
+        ctx.info("$GITHUB_TOKEN was found on the environ")
+        return github_token
+
+    gh = shutil.which("gh")
+    if gh is None:
+        ctx.info("The 'gh' CLI tool is not available. Can't get a token using it.")
+        return github_token
+
+    ret = ctx.run(gh, "auth", "token", check=False, capture=True)
+    if ret.returncode == 0:
+        ctx.info("Got the GitHub token from the 'gh' CLI tool")
+        return ret.stdout.decode().strip() or None
+    ctx.info("Failed to get the GitHub token from the 'gh' CLI tool")
    return github_token
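Note: taken together, the rewritten helper resolves a token in a fixed order: $GITHUB_TOKEN from the environment first, then the gh CLI, then None, in which case callers proceed unauthenticated. A standalone sketch of that order using plain subprocess instead of the ptscripts Context the real helper goes through:

import os
import shutil
import subprocess


def resolve_github_token() -> str | None:
    # 1. The environment variable wins outright.
    token = os.environ.get("GITHUB_TOKEN")
    if token is not None:
        return token
    # 2. Fall back to the 'gh' CLI, if it is installed and logged in.
    gh = shutil.which("gh")
    if gh is None:
        return None
    ret = subprocess.run([gh, "auth", "token"], capture_output=True, check=False)
    if ret.returncode == 0:
        return ret.stdout.decode().strip() or None
    # 3. No token anywhere; callers fall back to unauthenticated requests.
    return None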