Merge pull request #66520 from s0undt3ch/hotfix/merge-forward-into-master

[master] Merge 3007.x into master
Pedro Algarvio 2024-05-16 08:42:40 +01:00 committed by GitHub
commit fe47a95b28
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
46 changed files with 1495 additions and 595 deletions

View file

@ -1008,7 +1008,7 @@ jobs:
ubuntu-2204-arm64-pkg-tests:
name: Ubuntu 22.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04-arm64') }}
needs:
- prepare-workflow
- build-pkgs-onedir
@ -1027,6 +1027,48 @@ jobs:
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-pkg-tests:
name: Ubuntu 24.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-24.04') }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-arm64-pkg-tests:
name: Ubuntu 24.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1642,15 +1684,15 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
fedora-39:
name: Fedora 39 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'fedora-39') }}
fedora-40:
name: Fedora 40 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'fedora-40') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: fedora-39
distro-slug: fedora-40
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1921,7 +1963,7 @@ jobs:
ubuntu-2204-arm64:
name: Ubuntu 22.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04-arm64') }}
needs:
- prepare-workflow
- build-ci-deps
@ -1940,6 +1982,48 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404:
name: Ubuntu 24.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-24.04') }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404-arm64:
name: Ubuntu 24.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
combine-all-code-coverage:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
@ -1967,7 +2051,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -1981,6 +2065,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
steps:
- uses: actions/checkout@v4
@ -2133,7 +2219,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -2147,6 +2233,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
@ -2172,6 +2260,8 @@ jobs:
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests
- ubuntu-2204-arm64-pkg-tests
- ubuntu-2404-pkg-tests
- ubuntu-2404-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-arm64-pkg-tests

View file

@ -1092,6 +1092,48 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-pkg-tests:
name: Ubuntu 24.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-arm64-pkg-tests:
name: Ubuntu 24.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1707,15 +1749,15 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
fedora-39:
name: Fedora 39 Test
fedora-40:
name: Fedora 40 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: fedora-39
distro-slug: fedora-40
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -2005,6 +2047,48 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404:
name: Ubuntu 24.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404-arm64:
name: Ubuntu 24.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
combine-all-code-coverage:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
@ -2032,7 +2116,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -2046,6 +2130,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
steps:
- uses: actions/checkout@v4
@ -2315,11 +2401,11 @@ jobs:
arch: arm64
- pkg-type: deb
distro: ubuntu
version: "23.04"
version: "24.04"
arch: x86_64
- pkg-type: deb
distro: ubuntu
version: "23.04"
version: "24.04"
arch: arm64
steps:
@ -2438,15 +2524,15 @@ jobs:
arch: aarch64
- pkg-type: rpm
distro: fedora
version: "39"
version: "40"
arch: x86_64
- pkg-type: rpm
distro: fedora
version: "39"
version: "40"
arch: arm64
- pkg-type: rpm
distro: fedora
version: "39"
version: "40"
arch: aarch64
- pkg-type: rpm
distro: photon
@ -2950,7 +3036,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -2964,6 +3050,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
steps:
@ -3047,6 +3135,8 @@ jobs:
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests
- ubuntu-2204-arm64-pkg-tests
- ubuntu-2404-pkg-tests
- ubuntu-2404-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-arm64-pkg-tests

View file

@ -1074,6 +1074,48 @@ jobs:
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-pkg-tests:
name: Ubuntu 24.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-arm64-pkg-tests:
name: Ubuntu 24.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1689,15 +1731,15 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
fedora-39:
name: Fedora 39 Test
fedora-40:
name: Fedora 40 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: fedora-39
distro-slug: fedora-40
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1987,6 +2029,48 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404:
name: Ubuntu 24.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404-arm64:
name: Ubuntu 24.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
combine-all-code-coverage:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }}
@ -2014,7 +2098,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -2028,6 +2112,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
steps:
- uses: actions/checkout@v4
@ -2182,7 +2268,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -2196,6 +2282,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
@ -2221,6 +2309,8 @@ jobs:
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests
- ubuntu-2204-arm64-pkg-tests
- ubuntu-2404-pkg-tests
- ubuntu-2404-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-arm64-pkg-tests

View file

@ -1074,6 +1074,48 @@ jobs:
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-pkg-tests:
name: Ubuntu 24.04 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
ubuntu-2404-arm64-pkg-tests:
name: Ubuntu 24.04 Arm64 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: deb
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
macos-12-pkg-tests:
name: macOS 12 Package Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@ -1689,15 +1731,15 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
fedora-39:
name: Fedora 39 Test
fedora-40:
name: Fedora 40 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: fedora-39
distro-slug: fedora-40
nox-session: ci-test-onedir
platform: linux
arch: x86_64
@ -1987,6 +2029,48 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404:
name: Ubuntu 24.04 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
ubuntu-2404-arm64:
name: Ubuntu 24.04 Arm64 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: ubuntu-24.04-arm64
nox-session: ci-test-onedir
platform: linux
arch: arm64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
build-src-repo:
name: Build Repository
environment: staging
@ -2140,11 +2224,11 @@ jobs:
arch: arm64
- pkg-type: deb
distro: ubuntu
version: "23.04"
version: "24.04"
arch: x86_64
- pkg-type: deb
distro: ubuntu
version: "23.04"
version: "24.04"
arch: arm64
steps:
@ -2263,15 +2347,15 @@ jobs:
arch: aarch64
- pkg-type: rpm
distro: fedora
version: "39"
version: "40"
arch: x86_64
- pkg-type: rpm
distro: fedora
version: "39"
version: "40"
arch: arm64
- pkg-type: rpm
distro: fedora
version: "39"
version: "40"
arch: aarch64
- pkg-type: rpm
distro: photon
@ -2909,7 +2993,7 @@ jobs:
- debian-11-arm64
- debian-12
- debian-12-arm64
- fedora-39
- fedora-40
- opensuse-15
- photonos-4
- photonos-4-arm64
@ -2923,6 +3007,8 @@ jobs:
- ubuntu-2004-arm64
- ubuntu-2204
- ubuntu-2204-arm64
- ubuntu-2404
- ubuntu-2404-arm64
- rockylinux-8-pkg-tests
- rockylinux-8-arm64-pkg-tests
- rockylinux-9-pkg-tests
@ -2948,6 +3034,8 @@ jobs:
- ubuntu-2004-arm64-pkg-tests
- ubuntu-2204-pkg-tests
- ubuntu-2204-arm64-pkg-tests
- ubuntu-2404-pkg-tests
- ubuntu-2404-arm64-pkg-tests
- macos-12-pkg-tests
- macos-13-pkg-tests
- macos-13-arm64-pkg-tests

changelog/61166.fixed.md
View file

@ -0,0 +1,5 @@
Fixes multiple issues with the cmd module on Windows. Scripts are called using
the ``-File`` parameter to the ``powershell.exe`` binary. ``CLIXML`` data in
stderr is now removed (only applies to encoded commands). Commands can now be
sent to ``cmd.powershell`` as a list. Makes sure JSON data returned is valid.
Strips whitespace from the return when using ``runas``.
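
A rough, illustrative sketch of two of the fixes described above, using a made-up helper name (not salt's API): commands passed as a list are joined into a single string, and CLIXML noise in stderr from encoded commands is discarded.

def _normalize_powershell_io(cmd, stderr, encoded_cmd=False):
    # Commands may be passed as a list; join them into one string.
    if isinstance(cmd, list):
        cmd = " ".join(cmd)
    # Encoded commands dump CLIXML progress data into stderr; it is not an error.
    if encoded_cmd and "CLIXML" in stderr:
        stderr = ""
    return cmd.strip(), stderr

cmd, err = _normalize_powershell_io(
    ["Get-Item", "-Path", "C:\\Windows"], "#< CLIXML <Objs ...>", encoded_cmd=True
)
# cmd == "Get-Item -Path C:\\Windows", err == ""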

changelog/61534.fixed.md
View file

@ -0,0 +1,2 @@
Fixed the win_lgpo_netsh salt util to handle non-English systems. The util was
rewritten to use PowerShell instead of netsh to make the changes on the system.
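
A hedged sketch of the kind of call the rewritten util makes (``Get-NetFirewallProfile`` and ``-PolicyStore`` are standard PowerShell; the wrapper usage mirrors the util diff further down in this commit):

import salt.utils.win_pwsh

# Query the domain profile from the local group policy store, rather than
# shelling out to `netsh -f <script>` as the old implementation did.
cmd = ["Get-NetFirewallProfile", "Domain", "-PolicyStore", "localhost"]
settings = salt.utils.win_pwsh.run_dict(cmd)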

changelog/65295.fixed.md
View file

@ -0,0 +1 @@
Fix typo in nftables module to ensure unique nft family values

changelog/65837.fixed.md
View file

@ -0,0 +1 @@
Corrected x509_v2 CRL creation `last_update` and `next_update` values when system timezone is not UTC

changelog/66180.added.md
View file

@ -0,0 +1 @@
Add Ubuntu 24.04 support

changelog/66300.added.md
View file

@ -0,0 +1 @@
Add Fedora 40 support, replacing Fedora 39

changelog/66382.fixed.md
View file

@ -0,0 +1 @@
Fixed nftables.build_rule breaking IPv6 rules by using the wrong syntax for source and destination addresses

View file

@ -1,8 +1,8 @@
{
"amazonlinux-2-arm64": {
"ami": "ami-0fa1d515b17aa5832",
"ami": "ami-0c98c023fba59d522",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20240325.2133",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -10,9 +10,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-0c9a41917d788911e",
"ami": "ami-02cba95cfd7074794",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240325.2133",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -20,9 +20,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023-arm64": {
"ami": "ami-00644e6cc81cb8fc0",
"ami": "ami-0609f0e98f5a6b73d",
"ami_description": "CI Image of AmazonLinux 2023 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240325.2133",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240509.1529",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -30,9 +30,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023": {
"ami": "ami-01ba1cac2a9ba4845",
"ami": "ami-0554a801eb6dcc42c",
"ami_description": "CI Image of AmazonLinux 2023 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240325.2133",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240509.1529",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -40,9 +40,9 @@
"ssh_username": "ec2-user"
},
"archlinux-lts": {
"ami": "ami-01f729941d3262787",
"ami": "ami-01ad78f19930b9747",
"ami_description": "CI Image of ArchLinux lts x86_64",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20240209.1843",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "false",
"instance_type": "t3a.large",
@ -50,9 +50,9 @@
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-0a0c4ce5d61416643",
"ami": "ami-0ef52419c91cb0169",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20240325.2134",
"ami_name": "salt-project/ci/centos/7/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -60,9 +60,9 @@
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-06fec7a8fe157fe7d",
"ami": "ami-0973c8d1b91dcba5c",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20240325.2134",
"ami_name": "salt-project/ci/centos/7/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -70,9 +70,9 @@
"ssh_username": "centos"
},
"debian-11-arm64": {
"ami": "ami-0e1d6f34aaeba1e58",
"ami": "ami-0eff227d9a94d8692",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20240325.2134",
"ami_name": "salt-project/ci/debian/11/arm64/20240509.1529",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -80,9 +80,9 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-012327dae48ce80ac",
"ami": "ami-099b2a5a1fb995166",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20240325.2134",
"ami_name": "salt-project/ci/debian/11/x86_64/20240509.1529",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -90,9 +90,9 @@
"ssh_username": "admin"
},
"debian-12-arm64": {
"ami": "ami-0527ef47cece68f54",
"ami": "ami-0ab6b0cc8488f8880",
"ami_description": "CI Image of Debian 12 arm64",
"ami_name": "salt-project/ci/debian/12/arm64/20240325.2134",
"ami_name": "salt-project/ci/debian/12/arm64/20240509.1529",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -100,29 +100,29 @@
"ssh_username": "admin"
},
"debian-12": {
"ami": "ami-0d9d685ae10656958",
"ami": "ami-0e1f5b55325249c4e",
"ami_description": "CI Image of Debian 12 x86_64",
"ami_name": "salt-project/ci/debian/12/x86_64/20240325.2134",
"ami_name": "salt-project/ci/debian/12/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "admin"
},
"fedora-39-arm64": {
"ami": "ami-00d2f2e1fccac457d",
"ami_description": "CI Image of Fedora 39 arm64",
"ami_name": "salt-project/ci/fedora/39/arm64/20240325.2133",
"fedora-40-arm64": {
"ami": "ami-064df327a55f83953",
"ami_description": "CI Image of Fedora 40 arm64",
"ami_name": "salt-project/ci/fedora/40/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "fedora"
},
"fedora-39": {
"ami": "ami-072c01a40a6519153",
"ami_description": "CI Image of Fedora 39 x86_64",
"ami_name": "salt-project/ci/fedora/39/x86_64/20240325.2133",
"fedora-40": {
"ami": "ami-08d8dbd4f063788de",
"ami_description": "CI Image of Fedora 40 x86_64",
"ami_name": "salt-project/ci/fedora/40/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -130,9 +130,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-04cfbfd3c214348bc",
"ami": "ami-0f82d5ab3015af6ad",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20240325.2133",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20240509.1529",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -140,9 +140,9 @@
"ssh_username": "ec2-user"
},
"photonos-4-arm64": {
"ami": "ami-0bd76e6234ee685a7",
"ami": "ami-0ea152c346cb8e13b",
"ami_description": "CI Image of PhotonOS 4 arm64",
"ami_name": "salt-project/ci/photonos/4/arm64/20240325.2133",
"ami_name": "salt-project/ci/photonos/4/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -150,9 +150,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-0b1947785de4b2a6e",
"ami": "ami-09b55d0bf3a1aa7e5",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20240325.2134",
"ami_name": "salt-project/ci/photonos/4/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -160,9 +160,9 @@
"ssh_username": "root"
},
"photonos-5-arm64": {
"ami": "ami-0d02f34b9820752e4",
"ami": "ami-09de4952bc9fc068a",
"ami_description": "CI Image of PhotonOS 5 arm64",
"ami_name": "salt-project/ci/photonos/5/arm64/20240325.2133",
"ami_name": "salt-project/ci/photonos/5/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -170,9 +170,9 @@
"ssh_username": "root"
},
"photonos-5": {
"ami": "ami-0fd58f07139e9622e",
"ami": "ami-0c3375a583643fc77",
"ami_description": "CI Image of PhotonOS 5 x86_64",
"ami_name": "salt-project/ci/photonos/5/x86_64/20240325.2134",
"ami_name": "salt-project/ci/photonos/5/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -180,9 +180,9 @@
"ssh_username": "root"
},
"rockylinux-8-arm64": {
"ami": "ami-0e5d23f57141e5ac4",
"ami": "ami-0662cc201cada14b8",
"ami_description": "CI Image of RockyLinux 8 arm64",
"ami_name": "salt-project/ci/rockylinux/8/arm64/20240325.2134",
"ami_name": "salt-project/ci/rockylinux/8/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -190,9 +190,9 @@
"ssh_username": "rocky"
},
"rockylinux-8": {
"ami": "ami-0e2cac6a847d700aa",
"ami": "ami-071ca70a907d79e05",
"ami_description": "CI Image of RockyLinux 8 x86_64",
"ami_name": "salt-project/ci/rockylinux/8/x86_64/20240325.2134",
"ami_name": "salt-project/ci/rockylinux/8/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -200,9 +200,9 @@
"ssh_username": "rocky"
},
"rockylinux-9-arm64": {
"ami": "ami-0054d3b25a08d2b41",
"ami": "ami-065842dfdf03a1a03",
"ami_description": "CI Image of RockyLinux 9 arm64",
"ami_name": "salt-project/ci/rockylinux/9/arm64/20240325.2134",
"ami_name": "salt-project/ci/rockylinux/9/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -210,9 +210,9 @@
"ssh_username": "rocky"
},
"rockylinux-9": {
"ami": "ami-042d3b81138968bdb",
"ami": "ami-09f5d6df00e99ba16",
"ami_description": "CI Image of RockyLinux 9 x86_64",
"ami_name": "salt-project/ci/rockylinux/9/x86_64/20240325.2134",
"ami_name": "salt-project/ci/rockylinux/9/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -220,9 +220,9 @@
"ssh_username": "rocky"
},
"ubuntu-20.04-arm64": {
"ami": "ami-0bd2e3ee99c5a2f52",
"ami": "ami-00171fa604b826054",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240325.2134",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -230,9 +230,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-0fdc19cb94bc96db3",
"ami": "ami-07ddfbdc489064022",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240325.2134",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -240,9 +240,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-0690e86bc116a6245",
"ami": "ami-0e6b6fc1dd298e055",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240325.2134",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -250,29 +250,29 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-0285c21e3abc8b2b2",
"ami": "ami-0736289579c0d01ba",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240325.2134",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "ubuntu"
},
"ubuntu-23.04-arm64": {
"ami": "ami-09e0eb04bbf2a2f35",
"ami_description": "CI Image of Ubuntu 23.04 arm64",
"ami_name": "salt-project/ci/ubuntu/23.04/arm64/20240325.2134",
"ubuntu-24.04-arm64": {
"ami": "ami-015058823f69446b3",
"ami_description": "CI Image of Ubuntu 24.04 arm64",
"ami_name": "salt-project/ci/ubuntu/24.04/arm64/20240509.1530",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "ubuntu"
},
"ubuntu-23.04": {
"ami": "ami-029edca569b26d625",
"ami_description": "CI Image of Ubuntu 23.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20240325.2134",
"ubuntu-24.04": {
"ami": "ami-0eb04152e7cafaaf9",
"ami_description": "CI Image of Ubuntu 24.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/24.04/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -280,9 +280,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-0474d8e7e13c81883",
"ami": "ami-06026cb4d83072df5",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20240325.2133",
"ami_name": "salt-project/ci/windows/2016/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -290,9 +290,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-07afee87d071123bf",
"ami": "ami-095a9256ec0e8261c",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20240325.2133",
"ami_name": "salt-project/ci/windows/2019/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -300,9 +300,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-08f69046343f92cc4",
"ami": "ami-0d295c0711e513c05",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20240325.2133",
"ami_name": "salt-project/ci/windows/2022/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",

View file

@ -10,5 +10,5 @@ mandatory_os_slugs:
- archlinux-lts
- photonos-5-arm64
- macos-12
- ubuntu-22.04-arm64
- ubuntu-24.04-arm64
- windows-2022

View file

@ -256,32 +256,44 @@ def _check_avail(cmd):
return bret and wret
def _prep_powershell_cmd(shell, cmd, stack, encoded_cmd):
def _prep_powershell_cmd(win_shell, cmd, encoded_cmd):
"""
Prep cmd when shell is powershell
Prep cmd when shell is powershell. If we were called by script(), then fake
out the Windows shell to run a Powershell script. Otherwise, just run a
Powershell command.
"""
# Find the full path to the shell
win_shell = salt.utils.path.which(win_shell)
# If this is running on Windows wrap
# the shell in quotes in case there are
# spaces in the paths.
if salt.utils.platform.is_windows():
shell = f'"{shell}"'
if not win_shell:
raise CommandExecutionError("PowerShell binary not found")
new_cmd = [win_shell, "-NonInteractive", "-NoProfile", "-ExecutionPolicy", "Bypass"]
# extract_stack() returns a list of tuples.
# The last item in the list [-1] is the current method.
# The third item[2] in each tuple is the name of that method.
if stack[-2][2] == "script":
cmd = (
"{} -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command {}".format(
shell, cmd
)
)
stack = traceback.extract_stack(limit=3)
if stack[-3][2] == "script":
# If this is cmd.script, then we're running a file
# You might be tempted to use -File here instead of -Command
# The problem with using -File is that any arguments that contain
# powershell commands themselves will not be evaluated
# See GitHub issue #56195
new_cmd.append("-Command")
if isinstance(cmd, list):
cmd = " ".join(cmd)
new_cmd.append(f"& {cmd.strip()}")
elif encoded_cmd:
cmd = f"{shell} -NonInteractive -NoProfile -EncodedCommand {cmd}"
new_cmd.extend(["-EncodedCommand", f"{cmd}"])
else:
cmd = f'{shell} -NonInteractive -NoProfile -Command "{cmd}"'
# Strip whitespace
if isinstance(cmd, list):
cmd = " ".join(cmd)
new_cmd.extend(["-Command", f"& {{{cmd.strip()}}}"])
return cmd
log.debug(new_cmd)
return new_cmd
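
For orientation, a hedged example of the argument list this now returns for a plain (non-script, non-encoded) call; the resolved executable path is an assumption about the host:

# Roughly what _prep_powershell_cmd("powershell", "Get-ChildItem C:\\Temp", encoded_cmd=False)
# yields on a typical Windows install:
[
    "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe",
    "-NonInteractive",
    "-NoProfile",
    "-ExecutionPolicy",
    "Bypass",
    "-Command",
    "& {Get-ChildItem C:\\Temp}",
]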
def _run(
@ -384,19 +396,7 @@ def _run(
# The powershell core binary is "pwsh"
# you can also pass a path here as long as the binary name is one of the two
if any(word in shell.lower().strip() for word in ["powershell", "pwsh"]):
# Strip whitespace
if isinstance(cmd, str):
cmd = cmd.strip()
elif isinstance(cmd, list):
cmd = " ".join(cmd).strip()
cmd = cmd.replace('"', '\\"')
# If we were called by script(), then fakeout the Windows
# shell to run a Powershell script.
# Else just run a Powershell command.
stack = traceback.extract_stack(limit=2)
cmd = _prep_powershell_cmd(shell, cmd, stack, encoded_cmd)
cmd = _prep_powershell_cmd(shell, cmd, encoded_cmd)
# munge the cmd and cwd through the template
(cmd, cwd) = _render_cmd(cmd, cwd, template, saltenv, pillarenv, pillar_override)
@ -809,6 +809,9 @@ def _run(
_log_cmd(cmd),
)
# Encoded commands dump CLIXML data in stderr. It's not an actual error
if encoded_cmd and "CLIXML" in err:
err = ""
if rstrip:
if out is not None:
out = out.rstrip()
@ -1055,6 +1058,7 @@ def run(
ignore_retcode=False,
saltenv=None,
use_vt=False,
redirect_stderr=True,
bg=False,
password=None,
encoded_cmd=False,
@ -1190,6 +1194,12 @@ def run(
:param bool use_vt: Use VT utils (saltstack) to stream the command output
more interactively to the console and the logs. This is experimental.
:param bool redirect_stderr: If set to ``True``, then stderr will be
redirected to stdout. This is helpful for cases where obtaining both
the retcode and output is desired. Default is ``True``
.. versionadded:: 3006.9
:param bool encoded_cmd: Specify if the supplied command is encoded.
Only applies to shell 'powershell' and 'pwsh'.
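
A minimal sketch of what the new flag controls, mirroring the selection added to run() below; the flag value here is an arbitrary example:

import subprocess

redirect_stderr = False  # keep stderr separate instead of folding it into stdout
stderr = subprocess.STDOUT if redirect_stderr else subprocess.PIPE
# run() then passes this `stderr` handle through to _run().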
@ -1301,6 +1311,7 @@ def run(
salt '*' cmd.run cmd='sed -e s/=/:/g'
"""
python_shell = _python_shell_default(python_shell, kwargs.get("__pub_jid", ""))
stderr = subprocess.STDOUT if redirect_stderr else subprocess.PIPE
ret = _run(
cmd,
runas=runas,
@ -1309,7 +1320,7 @@ def run(
python_shell=python_shell,
cwd=cwd,
stdin=stdin,
stderr=subprocess.STDOUT,
stderr=stderr,
env=env,
clean_env=clean_env,
prepend_path=prepend_path,
@ -4057,6 +4068,9 @@ def powershell(
else:
python_shell = True
if isinstance(cmd, list):
cmd = " ".join(cmd)
# Append PowerShell Object formatting
# ConvertTo-JSON is only available on PowerShell 3.0 and later
psversion = shell_info("powershell")["psversion"]
@ -4085,7 +4099,7 @@ def powershell(
encoded_cmd = False
# Retrieve the response, while overriding shell with 'powershell'
response = run(
response = run_stdout(
cmd,
cwd=cwd,
stdin=stdin,
@ -4113,9 +4127,8 @@ def powershell(
**kwargs,
)
# Sometimes Powershell returns an empty string, which isn't valid JSON
if response == "":
response = "{}"
response = _prep_powershell_json(response)
try:
return salt.utils.json.loads(response)
except Exception: # pylint: disable=broad-except
@ -4419,10 +4432,16 @@ def powershell_all(
else:
python_shell = True
if isinstance(cmd, list):
cmd = " ".join(cmd)
# Append PowerShell Object formatting
cmd += " | ConvertTo-JSON"
if depth is not None:
cmd += f" -Depth {depth}"
# ConvertTo-JSON is only available on PowerShell 3.0 and later
psversion = shell_info("powershell")["psversion"]
if salt.utils.versions.version_cmp(psversion, "2.0") == 1:
cmd += " | ConvertTo-JSON"
if depth is not None:
cmd += f" -Depth {depth}"
if encode_cmd:
# Convert the cmd to UTF-16LE without a BOM and base64 encode.
@ -4474,6 +4493,8 @@ def powershell_all(
response["result"] = []
return response
stdoutput = _prep_powershell_json(stdoutput)
# If we fail to parse stdoutput we will raise an exception
try:
result = salt.utils.json.loads(stdoutput)
@ -4492,9 +4513,30 @@ def powershell_all(
else:
# result type is list so the force_list param has no effect
response["result"] = result
# Encoded commands dump CLIXML data in stderr. It's not an actual error
if "CLIXML" in response["stderr"]:
response["stderr"] = ""
return response
def _prep_powershell_json(text):
"""
Try to fix the output from ConvertTo-JSON in powershell commands to make it
valid JSON
"""
# An empty string just needs to be an empty quote
if text == "":
text = '""'
else:
# Raw text needs to be quoted
starts_with = ['"', "{", "["]
if not any(text.startswith(x) for x in starts_with):
text = f'"{text}"'
return text
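
The helper's behaviour, spelled out as a quick self-check; the values follow directly from the branches above and assume _prep_powershell_json is in scope:

assert _prep_powershell_json("") == '""'                        # empty output -> empty JSON string
assert _prep_powershell_json("Running") == '"Running"'          # bare text gets quoted
assert _prep_powershell_json('{"Name": "salt"}') == '{"Name": "salt"}'  # objects pass through
assert _prep_powershell_json("[1, 2]") == "[1, 2]"              # arrays pass through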
def run_bg(
cmd,
cwd=None,

View file

@ -165,14 +165,18 @@ def build_rule(
del kwargs["counter"]
if "saddr" in kwargs or "source" in kwargs:
rule += "ip saddr {} ".format(kwargs.get("saddr") or kwargs.get("source"))
rule += "{} saddr {} ".format(
nft_family, kwargs.get("saddr") or kwargs.get("source")
)
if "saddr" in kwargs:
del kwargs["saddr"]
if "source" in kwargs:
del kwargs["source"]
if "daddr" in kwargs or "destination" in kwargs:
rule += "ip daddr {} ".format(kwargs.get("daddr") or kwargs.get("destination"))
rule += "{} daddr {} ".format(
nft_family, kwargs.get("daddr") or kwargs.get("destination")
)
if "daddr" in kwargs:
del kwargs["daddr"]
if "destination" in kwargs:
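
To make the fix concrete, a small standalone sketch of the corrected formatting; the family and addresses are made-up values, not taken from the module:

nft_family = "ip6"
kwargs = {"saddr": "2001:db8::1", "daddr": "2001:db8::2"}
rule = ""
rule += "{} saddr {} ".format(nft_family, kwargs.get("saddr") or kwargs.get("source"))
rule += "{} daddr {} ".format(nft_family, kwargs.get("daddr") or kwargs.get("destination"))
# rule == "ip6 saddr 2001:db8::1 ip6 daddr 2001:db8::2 " (previously hard-coded "ip ...")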

View file

@ -450,7 +450,9 @@ def get_config():
raise
config = dict()
if raw_config:
if not raw_config:
raise CommandExecutionError("Not Configured")
else:
# Does this Configuration contain a single resource
if "ConfigurationName" in raw_config:
# Load the single resource
@ -606,11 +608,13 @@ def test_config():
"""
cmd = "Test-DscConfiguration"
try:
_pshell(cmd, ignore_retcode=True)
result = _pshell(cmd, ignore_retcode=True)
except CommandExecutionError as exc:
if "Current configuration does not exist" in exc.info["stderr"]:
raise CommandExecutionError("Not Configured")
raise
if not result:
raise CommandExecutionError("Not Configured")
return True
@ -635,11 +639,14 @@ def get_config_status():
"Type, Mode, RebootRequested, NumberofResources"
)
try:
return _pshell(cmd, ignore_retcode=True)
result = _pshell(cmd, ignore_retcode=True)
except CommandExecutionError as exc:
if "No status information available" in exc.info["stderr"]:
raise CommandExecutionError("Not Configured")
raise
if not result:
raise CommandExecutionError("Not Configured")
return result
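
A hedged caller-side sketch of the new behaviour: the PowerShell error text and an empty result now surface as the same ``Not Configured`` error (the wrapper function here is hypothetical):

from salt.exceptions import CommandExecutionError

def dsc_is_configured():
    """Return False when the host has no DSC configuration applied."""
    try:
        get_config_status()  # helper shown in the diff above
    except CommandExecutionError as exc:
        if "Not Configured" in str(exc):
            return False
        raise
    return True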
def get_lcm_config():

View file

@ -171,12 +171,12 @@ by setting the following minion configuration value:
import base64
import copy
import datetime
import glob
import logging
import os.path
import re
import sys
from datetime import datetime, timedelta, timezone
try:
import cryptography.x509 as cx509
@ -1408,10 +1408,12 @@ def expires(certificate, days=0):
Defaults to ``0``, which checks for the current time.
"""
cert = x509util.load_cert(certificate)
# dates are encoded in UTC/GMT, they are returned as a naive datetime object
return cert.not_valid_after <= datetime.datetime.utcnow() + datetime.timedelta(
days=days
)
try:
not_after = cert.not_valid_after_utc
except AttributeError:
# naive datetime object, release <42 (it's always UTC)
not_after = cert.not_valid_after.replace(tzinfo=timezone.utc)
return not_after <= datetime.now(tz=timezone.utc) + timedelta(days=days)
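
A standalone sketch of the compatibility pattern used here: cryptography 42+ exposes timezone-aware ``*_utc`` properties, while older releases return naive datetimes that are implicitly UTC (the helper name is ours, not the module's):

from datetime import datetime, timezone

def _as_aware_utc(value):
    # Normalize a possibly naive (pre-cryptography-42) datetime to aware UTC.
    if value.tzinfo is None:
        return value.replace(tzinfo=timezone.utc)
    return value

assert _as_aware_utc(datetime(2024, 5, 16)) == datetime(2024, 5, 16, tzinfo=timezone.utc)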
def expired(certificate):
@ -1691,6 +1693,13 @@ def read_certificate(certificate):
cert = x509util.load_cert(certificate)
key_type = x509util.get_key_type(cert.public_key(), as_string=True)
try:
not_before = cert.not_valid_before_utc
not_after = cert.not_valid_after_utc
except AttributeError:
# naive datetime object, release <42 (it's always UTC)
not_before = cert.not_valid_before.replace(tzinfo=timezone.utc)
not_after = cert.not_valid_after.replace(tzinfo=timezone.utc)
ret = {
"version": cert.version.value + 1, # 0-indexed
"key_size": cert.public_key().key_size if key_type in ["ec", "rsa"] else None,
@ -1706,8 +1715,8 @@ def read_certificate(certificate):
"issuer": _parse_dn(cert.issuer),
"issuer_hash": x509util.pretty_hex(_get_name_hash(cert.issuer)),
"issuer_str": cert.issuer.rfc4514_string(),
"not_before": cert.not_valid_before.strftime(x509util.TIME_FMT),
"not_after": cert.not_valid_after.strftime(x509util.TIME_FMT),
"not_before": not_before.strftime(x509util.TIME_FMT),
"not_after": not_after.strftime(x509util.TIME_FMT),
"public_key": get_public_key(cert),
"extensions": _parse_extensions(cert.extensions),
}
@ -1773,10 +1782,16 @@ def read_crl(crl):
The certificate revocation list to read.
"""
crl = x509util.load_crl(crl)
try:
last_update = crl.last_update_utc
next_update = crl.next_update_utc
except AttributeError:
last_update = crl.last_update.replace(tzinfo=timezone.utc)
next_update = crl.next_update.replace(tzinfo=timezone.utc)
ret = {
"issuer": _parse_dn(crl.issuer),
"last_update": crl.last_update.strftime(x509util.TIME_FMT),
"next_update": crl.next_update.strftime(x509util.TIME_FMT),
"last_update": last_update.strftime(x509util.TIME_FMT),
"next_update": next_update.strftime(x509util.TIME_FMT),
"revoked_certificates": {},
"extensions": _parse_extensions(crl.extensions),
}
@ -1796,12 +1811,15 @@ def read_crl(crl):
ret["signature_algorithm"] = crl.signature_algorithm_oid.dotted_string
for revoked in crl:
try:
revocation_date = revoked.revocation_date_utc
except AttributeError:
# naive datetime object, release <42 (it's always UTC)
revocation_date = revoked.revocation_date.replace(tzinfo=timezone.utc)
ret["revoked_certificates"].update(
{
x509util.dec2hex(revoked.serial_number).replace(":", ""): {
"revocation_date": revoked.revocation_date.strftime(
x509util.TIME_FMT
),
"revocation_date": revocation_date.strftime(x509util.TIME_FMT),
"extensions": _parse_crl_entry_extensions(revoked.extensions),
}
}

View file

@ -183,9 +183,9 @@ according to the www policy.
import base64
import copy
import datetime
import logging
import os.path
from datetime import datetime, timedelta, timezone
import salt.utils.files
from salt.exceptions import CommandExecutionError, SaltInvocationError
@ -483,11 +483,16 @@ def certificate_managed(
else None
):
changes["pkcs12_friendlyname"] = pkcs12_friendlyname
try:
curr_not_after = current.not_valid_after_utc
except AttributeError:
# naive datetime object, release <42 (it's always UTC)
curr_not_after = current.not_valid_after.replace(
tzinfo=timezone.utc
)
if (
current.not_valid_after
< datetime.datetime.utcnow()
+ datetime.timedelta(days=days_remaining)
if curr_not_after < datetime.now(tz=timezone.utc) + timedelta(
days=days_remaining
):
changes["expiration"] = True
@ -892,10 +897,14 @@ def crl_managed(
if encoding != current_encoding:
changes["encoding"] = encoding
try:
curr_next_update = current.next_update_utc
except AttributeError:
# naive datetime object, release <42 (it's always UTC)
curr_next_update = current.next_update.replace(tzinfo=timezone.utc)
if days_remaining and (
current.next_update
< datetime.datetime.utcnow()
+ datetime.timedelta(days=days_remaining)
curr_next_update
< datetime.now(tz=timezone.utc) + timedelta(days=days_remaining)
):
changes["expiration"] = True

View file

@ -6,16 +6,24 @@ A salt util for modifying firewall settings.
This util allows you to modify firewall settings in the local group policy in
addition to the normal firewall settings. Parameters are taken from the
netsh advfirewall prompt.
netsh advfirewall prompt. This utility has been adapted to use powershell
instead of the ``netsh`` command to make it compatible with non-English systems.
It maintains the ``netsh`` commands and parameters, but uses PowerShell under
the hood.
.. versionchanged:: 3008.0
.. note::
More information can be found in the advfirewall context in netsh. This can
be access by opening a netsh prompt. At a command prompt type the following:
be accessed by opening a netsh prompt. At a command prompt type the
following:
c:\>netsh
netsh>advfirewall
netsh advfirewall>set help
netsh advfirewall>set domain help
.. code-block:: powershell
c:\>netsh
netsh>advfirewall
netsh advfirewall>set help
netsh advfirewall>set domain help
Usage:
@ -66,87 +74,73 @@ Usage:
store='lgpo')
"""
import logging
import os
import re
import socket
import tempfile
from textwrap import dedent
import salt.modules.cmdmod
import salt.utils.platform
import salt.utils.win_pwsh
from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
__hostname__ = socket.gethostname()
__virtualname__ = "netsh"
ON_OFF = {
0: "OFF",
1: "ON",
2: "NotConfigured",
"off": "False",
"on": "True",
"notconfigured": "NotConfigured",
}
ENABLE_DISABLE = {
0: "Disable",
1: "Enable",
2: "NotConfigured",
"disable": 0,
"enable": 1,
"notconfigured": 2,
}
OUTBOUND = {
0: "NotConfigured",
2: "AllowOutbound",
4: "BlockOutbound",
"notconfigured": "NotConfigured",
"allowoutbound": "Allow",
"blockoutbound": "Block",
}
# Although utils are often directly imported, it is also possible to use the
# loader.
def __virtual__():
def _get_inbound_text(rule, action):
"""
Only load if on a Windows system
The "Inbound connections" setting is a combination of 2 parameters:
- AllowInboundRules
- DefaultInboundAction
The settings are as follows:
Rules Action
2 2 AllowInbound
2 4 BlockInbound
0 4 BlockInboundAlways
2 0 NotConfigured
"""
if not salt.utils.platform.is_windows():
return False, "This utility only available on Windows"
return __virtualname__
settings = {
0: {
4: "BlockInboundAlways",
},
2: {
0: "NotConfigured",
2: "AllowInbound",
4: "BlockInbound",
},
}
return settings[rule][action]
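
Reading off the table in the docstring, a few illustrative mappings:

# (AllowInboundRules, DefaultInboundAction) -> netsh-style text
assert _get_inbound_text(2, 2) == "AllowInbound"
assert _get_inbound_text(2, 4) == "BlockInbound"
assert _get_inbound_text(0, 4) == "BlockInboundAlways"
assert _get_inbound_text(2, 0) == "NotConfigured"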
def _netsh_file(content):
"""
helper function to get the results of ``netsh -f content.txt``
Running ``netsh`` will drop you into a ``netsh`` prompt where you can issue
``netsh`` commands. You can put a series of commands in an external file and
run them as if from a ``netsh`` prompt using the ``-f`` switch. That's what
this function does.
Args:
content (str):
The contents of the file that will be run by the ``netsh -f``
command
Returns:
str: The text returned by the netsh command
"""
with tempfile.NamedTemporaryFile(
mode="w", prefix="salt-", suffix=".netsh", delete=False, encoding="utf-8"
) as fp:
fp.write(content)
try:
log.debug("%s:\n%s", fp.name, content)
return salt.modules.cmdmod.run(f"netsh -f {fp.name}", python_shell=True)
finally:
os.remove(fp.name)
def _netsh_command(command, store):
if store.lower() not in ("local", "lgpo"):
raise ValueError(f"Incorrect store: {store}")
# set the store for local or lgpo
if store.lower() == "local":
netsh_script = dedent(
"""\
advfirewall
set store local
{}
""".format(
command
)
)
else:
netsh_script = dedent(
"""\
advfirewall
set store gpo = {}
{}
""".format(
__hostname__, command
)
)
return _netsh_file(content=netsh_script).splitlines()
def _get_inbound_settings(text):
settings = {
"allowinbound": (2, 2),
"blockinbound": (2, 4),
"blockinboundalways": (0, 4),
"notconfigured": (2, 0),
}
return settings[text.lower()]
def get_settings(profile, section, store="local"):
@ -195,70 +189,54 @@ def get_settings(profile, section, store="local"):
raise ValueError(f"Incorrect section: {section}")
if store.lower() not in ("local", "lgpo"):
raise ValueError(f"Incorrect store: {store}")
command = f"show {profile}profile {section}"
# run it
results = _netsh_command(command=command, store=store)
# sample output:
# Domain Profile Settings:
# ----------------------------------------------------------------------
# LocalFirewallRules N/A (GPO-store only)
# LocalConSecRules N/A (GPO-store only)
# InboundUserNotification Disable
# RemoteManagement Disable
# UnicastResponseToMulticast Enable
# if it's less than 3 lines it failed
if len(results) < 3:
raise CommandExecutionError(f"Invalid results: {results}")
ret = {}
# Skip the first 2 lines. Add everything else to a dictionary
for line in results[3:]:
ret.update(dict(list(zip(*[iter(re.split(r"\s{2,}", line))] * 2))))
# Build the powershell command
cmd = ["Get-NetFirewallProfile"]
if profile:
cmd.append(profile)
if store and store.lower() == "lgpo":
cmd.extend(["-PolicyStore", "localhost"])
# Remove spaces from the values so that `Not Configured` is detected
# correctly
for item in ret:
ret[item] = ret[item].replace(" ", "")
# Run the command
settings = salt.utils.win_pwsh.run_dict(cmd)
# special handling for firewallpolicy
if section == "firewallpolicy":
inbound, outbound = ret["Firewall Policy"].split(",")
return {"Inbound": inbound, "Outbound": outbound}
# A successful run should return a dictionary
if not settings:
raise CommandExecutionError("LGPO NETSH: An unknown error occurred")
return ret
# Remove the junk
for setting in list(settings.keys()):
if setting.startswith("Cim"):
settings.pop(setting)
# Make it look like netsh output
ret_settings = {
"firewallpolicy": {
"Inbound": _get_inbound_text(
settings["AllowInboundRules"], settings["DefaultInboundAction"]
),
"Outbound": OUTBOUND[settings["DefaultOutboundAction"]],
},
"state": {
"State": ON_OFF[settings["Enabled"]],
},
"logging": {
"FileName": settings["LogFileName"],
"LogAllowedConnections": ENABLE_DISABLE[settings["LogAllowed"]],
"LogDroppedConnections": ENABLE_DISABLE[settings["LogBlocked"]],
"MaxFileSize": settings["LogMaxSizeKilobytes"],
},
"settings": {
"InboundUserNotification": ENABLE_DISABLE[settings["NotifyOnListen"]],
"LocalConSecRules": ENABLE_DISABLE[settings["AllowLocalIPsecRules"]],
"LocalFirewallRules": ENABLE_DISABLE[settings["AllowLocalFirewallRules"]],
"UnicastResponseToMulticast": ENABLE_DISABLE[
settings["AllowUnicastResponseToMulticast"]
],
},
}
def get_all_settings(profile, store="local"):
"""
Gets all the properties for the specified profile in the specified store
Args:
profile (str):
The firewall profile to query. Valid options are:
- domain
- public
- private
store (str):
The store to use. This is either the local firewall policy or the
policy defined by local group policy. Valid options are:
- lgpo
- local
Default is ``local``
Returns:
dict: A dictionary containing the specified settings
"""
ret = dict()
ret.update(get_settings(profile=profile, section="state", store=store))
ret.update(get_settings(profile=profile, section="firewallpolicy", store=store))
ret.update(get_settings(profile=profile, section="settings", store=store))
ret.update(get_settings(profile=profile, section="logging", store=store))
return ret
return ret_settings[section.lower()]
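
A hedged usage sketch; the return values depend on the host's firewall state, so the ones shown are only plausible examples:

# On a host whose domain profile is enabled and blocks inbound traffic:
get_settings(profile="domain", section="state", store="lgpo")
# -> {"State": "ON"}
get_settings(profile="domain", section="firewallpolicy")
# -> {"Inbound": "BlockInbound", "Outbound": "Allow"}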
def get_all_profiles(store="local"):
@ -286,6 +264,82 @@ def get_all_profiles(store="local"):
}
def get_all_settings(profile, store="local"):
"""
Gets all the properties for the specified profile in the specified store
Args:
profile (str):
The firewall profile to query. Valid options are:
- domain
- public
- private
store (str):
The store to use. This is either the local firewall policy or the
policy defined by local group policy. Valid options are:
- lgpo
- local
Default is ``local``
Returns:
dict: A dictionary containing the specified settings
Raises:
CommandExecutionError: If an error occurs
ValueError: If the parameters are incorrect
"""
# validate input
if profile.lower() not in ("domain", "public", "private"):
raise ValueError(f"Incorrect profile: {profile}")
if store.lower() not in ("local", "lgpo"):
raise ValueError(f"Incorrect store: {store}")
# Build the powershell command
cmd = ["Get-NetFirewallProfile"]
if profile:
cmd.append(profile)
if store and store.lower() == "lgpo":
cmd.extend(["-PolicyStore", "localhost"])
# Run the command
settings = salt.utils.win_pwsh.run_dict(cmd)
# A successful run should return a dictionary
if not settings:
raise CommandExecutionError("LGPO NETSH: An unknown error occurred")
# Remove the junk
for setting in list(settings.keys()):
if setting.startswith("Cim"):
settings.pop(setting)
# Make it look like netsh output
ret_settings = {
"FileName": settings["LogFileName"],
"Inbound": _get_inbound_text(
settings["AllowInboundRules"], settings["DefaultInboundAction"]
),
"InboundUserNotification": ENABLE_DISABLE[settings["NotifyOnListen"]],
"LocalConSecRules": ENABLE_DISABLE[settings["AllowLocalIPsecRules"]],
"LocalFirewallRules": ENABLE_DISABLE[settings["AllowLocalFirewallRules"]],
"LogAllowedConnections": ENABLE_DISABLE[settings["LogAllowed"]],
"LogDroppedConnections": ENABLE_DISABLE[settings["LogBlocked"]],
"MaxFileSize": settings["LogMaxSizeKilobytes"],
"Outbound": OUTBOUND[settings["DefaultOutboundAction"]],
"State": ON_OFF[settings["Enabled"]],
"UnicastResponseToMulticast": ON_OFF[
settings["AllowUnicastResponseToMulticast"]
],
}
return ret_settings
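For orientation, a minimal usage sketch of the rewritten helper above (not part of the diff; import path follows the module being changed, and the example values are illustrative only):

import salt.utils.win_lgpo_netsh as win_lgpo_netsh

# store="lgpo" reads the Group Policy store (-PolicyStore localhost),
# store="local" reads the effective local policy
settings = win_lgpo_netsh.get_all_settings(profile="domain", store="lgpo")
# Illustrative keys/values in the returned mapping:
# settings["State"]                 -> "On"
# settings["Inbound"]               -> "BlockInbound"
# settings["LogAllowedConnections"] -> "NotConfigured"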
def set_firewall_settings(profile, inbound=None, outbound=None, store="local"):
"""
Set the firewall inbound/outbound settings for the specified profile and
@ -307,7 +361,7 @@ def set_firewall_settings(profile, inbound=None, outbound=None, store="local"):
- blockinbound
- blockinboundalways
- allowinbound
- notconfigured
- notconfigured <=== lgpo only
Default is ``None``
@ -317,7 +371,7 @@ def set_firewall_settings(profile, inbound=None, outbound=None, store="local"):
- allowoutbound
- blockoutbound
- notconfigured
- notconfigured <=== lgpo only
Default is ``None``
@ -355,21 +409,34 @@ def set_firewall_settings(profile, inbound=None, outbound=None, store="local"):
raise ValueError(f"Incorrect outbound value: {outbound}")
if not inbound and not outbound:
raise ValueError("Must set inbound or outbound")
if store == "local":
if inbound and inbound.lower() == "notconfigured":
msg = "Cannot set local inbound policies as NotConfigured"
raise CommandExecutionError(msg)
if outbound and outbound.lower() == "notconfigured":
msg = "Cannot set local outbound policies as NotConfigured"
raise CommandExecutionError(msg)
# You have to specify the inbound and outbound settings at the same time
# If you're only specifying one, you have to get the current setting for the
# other
if not inbound or not outbound:
ret = get_settings(profile=profile, section="firewallpolicy", store=store)
if not inbound:
inbound = ret["Inbound"]
if not outbound:
outbound = ret["Outbound"]
# Build the powershell command
cmd = ["Set-NetFirewallProfile"]
if profile:
cmd.append(profile)
if store and store.lower() == "lgpo":
cmd.extend(["-PolicyStore", "localhost"])
command = f"set {profile}profile firewallpolicy {inbound},{outbound}"
# Get inbound settings
if inbound:
in_rule, in_action = _get_inbound_settings(inbound.lower())
cmd.extend(["-AllowInboundRules", in_rule, "-DefaultInboundAction", in_action])
results = _netsh_command(command=command, store=store)
if outbound:
out_rule = OUTBOUND[outbound.lower()]
cmd.extend(["-DefaultOutboundAction", out_rule])
# Run the command
results = salt.utils.win_pwsh.run_dict(cmd)
# A successful run should return an empty list
if results:
raise CommandExecutionError(f"An error occurred: {results}")
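The same Set-NetFirewallProfile pattern recurs in the set_* functions below: build an argument list, append -PolicyStore localhost for the lgpo store, and hand the list to salt.utils.win_pwsh.run_dict, treating any output as an error. A hedged sketch of the equivalent call (the setting and its value are illustrative, not taken from the diff):

import salt.utils.win_pwsh
from salt.exceptions import CommandExecutionError

cmd = ["Set-NetFirewallProfile", "domain"]
cmd.extend(["-PolicyStore", "localhost"])       # only when store == "lgpo"
cmd.extend(["-DefaultInboundAction", "Block"])  # illustrative setting
results = salt.utils.win_pwsh.run_dict(cmd)
# A successful run returns nothing; anything else is treated as an error
if results:
    raise CommandExecutionError(f"An error occurred: {results}")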
@ -442,6 +509,10 @@ def set_logging_settings(profile, setting, value, store="local"):
# Input validation
if profile.lower() not in ("domain", "public", "private"):
raise ValueError(f"Incorrect profile: {profile}")
if store == "local":
if str(value).lower() == "notconfigured":
msg = "Cannot set local policies as NotConfigured"
raise CommandExecutionError(msg)
if setting.lower() not in (
"allowedconnections",
"droppedconnections",
@ -449,13 +520,21 @@ def set_logging_settings(profile, setting, value, store="local"):
"maxfilesize",
):
raise ValueError(f"Incorrect setting: {setting}")
settings = {"filename": ["-LogFileName", value]}
if setting.lower() in ("allowedconnections", "droppedconnections"):
if value.lower() not in ("enable", "disable", "notconfigured"):
raise ValueError(f"Incorrect value: {value}")
settings.update(
{
"allowedconnections": ["-LogAllowed", ENABLE_DISABLE[value.lower()]],
"droppedconnections": ["-LogBlocked", ENABLE_DISABLE[value.lower()]],
}
)
# TODO: Consider adding something like the following to validate filename
# https://stackoverflow.com/questions/9532499/check-whether-a-path-is-valid-in-python-without-creating-a-file-at-the-paths-ta
if setting.lower() == "maxfilesize":
if value.lower() != "notconfigured":
if str(value).lower() != "notconfigured":
# Must be a number between 1 and 32767
try:
int(value)
@ -463,9 +542,18 @@ def set_logging_settings(profile, setting, value, store="local"):
raise ValueError(f"Incorrect value: {value}")
if not 1 <= int(value) <= 32767:
raise ValueError(f"Incorrect value: {value}")
# Run the command
command = f"set {profile}profile logging {setting} {value}"
results = _netsh_command(command=command, store=store)
settings.update({"maxfilesize": ["-LogMaxSizeKilobytes", value]})
# Build the powershell command
cmd = ["Set-NetFirewallProfile"]
if profile:
cmd.append(profile)
if store and store.lower() == "lgpo":
cmd.extend(["-PolicyStore", "localhost"])
cmd.extend(settings[setting.lower()])
results = salt.utils.win_pwsh.run_dict(cmd)
# A successful run should return an empty list
if results:
@ -493,7 +581,6 @@ def set_settings(profile, setting, value, store="local"):
- localfirewallrules
- localconsecrules
- inboundusernotification
- remotemanagement
- unicastresponsetomulticast
value (str):
@ -526,16 +613,42 @@ def set_settings(profile, setting, value, store="local"):
"localfirewallrules",
"localconsecrules",
"inboundusernotification",
"remotemanagement",
"unicastresponsetomulticast",
):
raise ValueError(f"Incorrect setting: {setting}")
if value.lower() not in ("enable", "disable", "notconfigured"):
raise ValueError(f"Incorrect value: {value}")
if setting.lower() in ["localfirewallrules", "localconsecrules"]:
if store.lower() != "lgpo":
msg = f"{setting} can only be set using Group Policy"
raise CommandExecutionError(msg)
if setting.lower() == "inboundusernotification" and store.lower() != "lgpo":
if value.lower() == "notconfigured":
msg = "NotConfigured is only valid when setting group policy"
raise CommandExecutionError(msg)
# Run the command
command = f"set {profile}profile settings {setting} {value}"
results = _netsh_command(command=command, store=store)
# Build the powershell command
cmd = ["Set-NetFirewallProfile"]
if profile:
cmd.append(profile)
if store and store.lower() == "lgpo":
cmd.extend(["-PolicyStore", "localhost"])
settings = {
"localfirewallrules": [
"-AllowLocalFirewallRules",
ENABLE_DISABLE[value.lower()],
],
"localconsecrules": ["-AllowLocalIPsecRules", ENABLE_DISABLE[value.lower()]],
"inboundusernotification": ["-NotifyOnListen", ENABLE_DISABLE[value.lower()]],
"unicastresponsetomulticast": [
"-AllowUnicastResponseToMulticast",
ENABLE_DISABLE[value.lower()],
],
}
cmd.extend(settings[setting.lower()])
results = salt.utils.win_pwsh.run_dict(cmd)
# A successful run should return an empty list
if results:
@ -546,7 +659,7 @@ def set_settings(profile, setting, value, store="local"):
def set_state(profile, state, store="local"):
"""
Configure the firewall state.
Enable or disable the firewall profile.
Args:
@ -583,12 +696,22 @@ def set_state(profile, state, store="local"):
# Input validation
if profile.lower() not in ("domain", "public", "private"):
raise ValueError(f"Incorrect profile: {profile}")
if state.lower() not in ("on", "off", "notconfigured"):
raise ValueError(f"Incorrect state: {state}")
if not isinstance(state, bool):
if state.lower() not in ("on", "off", "notconfigured"):
raise ValueError(f"Incorrect state: {state}")
else:
state = "On" if state else "Off"
# Run the command
command = f"set {profile}profile state {state}"
results = _netsh_command(command=command, store=store)
# Build the powershell command
cmd = ["Set-NetFirewallProfile"]
if profile:
cmd.append(profile)
if store and store.lower() == "lgpo":
cmd.extend(["-PolicyStore", "localhost"])
cmd.extend(["-Enabled", ON_OFF[state.lower()]])
results = salt.utils.win_pwsh.run_dict(cmd)
# A successful run should return an empty list
if results:

View file

@ -3,26 +3,16 @@ import salt.utils.json
import salt.utils.platform
from salt.exceptions import CommandExecutionError
__virtualname__ = "win_pwsh"
def __virtual__():
"""
Only load if windows
"""
if not salt.utils.platform.is_windows():
return False, "This utility will only run on Windows"
return __virtualname__
def run_dict(cmd, cwd=None):
"""
Execute the powershell command and return the data as a dictionary
.. versionadded:: 3006.9
Args:
cmd (str): The powershell command to run
cmd (str,list): The powershell command to run
cwd (str): The current working directory
@ -34,6 +24,8 @@ def run_dict(cmd, cwd=None):
If an error is encountered or the command does not complete
successfully
"""
if isinstance(cmd, list):
cmd = " ".join(map(str, cmd))
if "convertto-json" not in cmd.lower():
cmd = f"{cmd} | ConvertTo-Json"
if "progresspreference" not in cmd.lower():

View file

@ -52,6 +52,9 @@ def __virtual__():
def split_username(username):
"""
Splits out the username from the domain name and returns both.
"""
domain = "."
user_name = username
if "@" in username:
@ -234,7 +237,7 @@ def runas(cmdLine, username, password=None, cwd=None):
fd_out = msvcrt.open_osfhandle(stdout_read.handle, os.O_RDONLY | os.O_TEXT)
with os.fdopen(fd_out, "r") as f_out:
stdout = f_out.read()
ret["stdout"] = stdout
ret["stdout"] = stdout.strip()
# Read standard error
fd_err = msvcrt.open_osfhandle(stderr_read.handle, os.O_RDONLY | os.O_TEXT)

View file

@ -1,10 +1,10 @@
import base64
import copy
import datetime
import ipaddress
import logging
import os.path
import re
from datetime import datetime, timedelta, timezone
from enum import Enum
from urllib.parse import urlparse, urlunparse
@ -313,14 +313,14 @@ def build_crt(
)
not_before = (
datetime.datetime.strptime(not_before, TIME_FMT)
datetime.strptime(not_before, TIME_FMT).replace(tzinfo=timezone.utc)
if not_before
else datetime.datetime.utcnow()
else datetime.now(tz=timezone.utc)
)
not_after = (
datetime.datetime.strptime(not_after, TIME_FMT)
datetime.strptime(not_after, TIME_FMT).replace(tzinfo=timezone.utc)
if not_after
else datetime.datetime.utcnow() + datetime.timedelta(days=days_valid)
else datetime.now(tz=timezone.utc) + timedelta(days=days_valid)
)
builder = builder.not_valid_before(not_before).not_valid_after(not_after)
@ -422,32 +422,38 @@ def build_crl(
builder = cx509.CertificateRevocationListBuilder()
if signing_cert:
builder = builder.issuer_name(signing_cert.subject)
builder = builder.last_update(datetime.datetime.today())
builder = builder.last_update(datetime.now(tz=timezone.utc))
builder = builder.next_update(
datetime.datetime.today() + datetime.timedelta(days=days_valid)
datetime.now(tz=timezone.utc) + timedelta(days=days_valid)
)
for rev in revoked:
serial_number = not_after = revocation_date = None
if "not_after" in rev:
not_after = datetime.datetime.strptime(rev["not_after"], TIME_FMT)
not_after = datetime.strptime(rev["not_after"], TIME_FMT).replace(
tzinfo=timezone.utc
)
if "serial_number" in rev:
serial_number = rev["serial_number"]
if "certificate" in rev:
rev_cert = load_cert(rev["certificate"])
serial_number = rev_cert.serial_number
not_after = rev_cert.not_valid_after
try:
not_after = rev_cert.not_valid_after_utc
except AttributeError:
# naive datetime object, release <42 (it's always UTC)
not_after = rev_cert.not_valid_after.replace(tzinfo=timezone.utc)
if not serial_number:
raise SaltInvocationError("Need serial_number or certificate")
serial_number = _get_serial_number(serial_number)
if not_after and not include_expired:
if datetime.datetime.utcnow() > not_after:
if datetime.now(tz=timezone.utc) > not_after:
continue
if "revocation_date" in rev:
revocation_date = datetime.datetime.strptime(
revocation_date = datetime.strptime(
rev["revocation_date"], TIME_FMT
)
).replace(tzinfo=timezone.utc)
else:
revocation_date = datetime.datetime.utcnow()
revocation_date = datetime.now(tz=timezone.utc)
revoked_cert = cx509.RevokedCertificateBuilder(
serial_number=serial_number, revocation_date=revocation_date
@ -1624,8 +1630,9 @@ def _create_invalidity_date(val, **kwargs):
if critical:
val = val.split(" ", maxsplit=1)[1]
try:
# InvalidityDate deals in naive datetime objects only currently
return (
cx509.InvalidityDate(datetime.datetime.strptime(val, TIME_FMT)),
cx509.InvalidityDate(datetime.strptime(val, TIME_FMT)),
critical,
)
except ValueError as err:
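The datetime changes in this file all follow the same two shapes: naive datetime.datetime.utcnow()/today() calls become timezone-aware datetime.now(tz=timezone.utc), and certificate timestamps prefer the *_utc properties that cryptography >= 42 exposes, falling back to attaching UTC to the older naive values. A condensed sketch of both patterns:

from datetime import datetime, timezone

# Aware "now" replaces the deprecated naive utcnow()
now = datetime.now(tz=timezone.utc)

def cert_not_after_utc(cert):
    # cryptography >= 42 exposes aware *_utc properties; older releases only
    # provide naive datetimes, which are documented to be UTC
    try:
        return cert.not_valid_after_utc
    except AttributeError:
        return cert.not_valid_after.replace(tzinfo=timezone.utc)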

View file

@ -34,18 +34,18 @@ import pytest
from tests.support.case import SSHCase
pytestmark = [
pytest.mark.skip_on_windows,
pytest.mark.skipif(
"grains['osfinger'] == 'Fedora Linux-39'",
reason="Fedora 39 ships with Python 3.12. Test can't run with system Python on 3.12",
'grains["osfinger"].startswith(("Fedora Linux-40", "Ubuntu-24.04", "Arch Linux"))',
reason="System ships with a version of python that is too recent for salt-ssh tests",
# Actually, the problem is that the tornado we ship is not prepared for Python 3.12,
# and it imports `ssl` and checks if the `match_hostname` function is defined, which
# has been deprecated since Python 3.7, so the logic ends up trying to import
# backports.ssl-match-hostname, which is not installed on the system.
)
),
]
@pytest.mark.skip_on_windows
class SSHCustomModuleTest(SSHCase):
"""
Test sls with custom module functionality using ssh

View file

@ -12,14 +12,16 @@ from tests.support.case import SSHCase
from tests.support.runtests import RUNTIME_VARS
pytestmark = [
pytest.mark.slow_test,
pytest.mark.timeout_unless_on_windows(120),
pytest.mark.skipif(
"grains['osfinger'] == 'Fedora Linux-39'",
reason="Fedora 39 ships with Python 3.12. Test can't run with system Python on 3.12",
'grains["osfinger"].startswith(("Fedora Linux-40", "Ubuntu-24.04", "Arch Linux"))',
reason="System ships with a version of python that is too recent for salt-ssh tests",
# Actually, the problem is that the tornado we ship is not prepared for Python 3.12,
# and it imports `ssl` and checks if the `match_hostname` function is defined, which
# has been deprecated since Python 3.7, so the logic ends up trying to import
# backports.ssl-match-hostname, which is not installed on the system.
)
),
]
SSH_SLS = "ssh_state_tests"
@ -28,8 +30,6 @@ SSH_SLS_FILE = "/tmp/salt_test_file"
log = logging.getLogger(__name__)
@pytest.mark.slow_test
@pytest.mark.timeout_unless_on_windows(120)
class SSHStateTest(SSHCase):
"""
testing the state system with salt-ssh
@ -55,7 +55,6 @@ class SSHStateTest(SSHCase):
exp_ret=SSH_SLS,
)
@pytest.mark.slow_test
def test_state_apply(self):
"""
test state.apply with salt-ssh
@ -66,7 +65,6 @@ class SSHStateTest(SSHCase):
check_file = self.run_function("file.file_exists", [SSH_SLS_FILE])
self.assertTrue(check_file)
@pytest.mark.slow_test
def test_state_sls_id(self):
"""
test state.sls_id with salt-ssh
@ -93,7 +91,6 @@ class SSHStateTest(SSHCase):
check_file = self.run_function("file.file_exists", [SSH_SLS_FILE])
self.assertTrue(check_file)
@pytest.mark.slow_test
def test_state_sls_wrong_id(self):
"""
test state.sls_id when id does not exist
@ -102,7 +99,6 @@ class SSHStateTest(SSHCase):
ret = self.run_function("state.sls_id", ["doesnotexist", SSH_SLS])
assert "No matches for ID" in ret
@pytest.mark.slow_test
def test_state_sls_id_with_pillar(self):
"""
test state.sls_id with pillar data
@ -116,7 +112,6 @@ class SSHStateTest(SSHCase):
)
self.assertTrue(check_file)
@pytest.mark.slow_test
def test_state_show_sls(self):
"""
test state.show_sls with salt-ssh
@ -127,7 +122,6 @@ class SSHStateTest(SSHCase):
check_file = self.run_function("file.file_exists", [SSH_SLS_FILE], wipe=False)
self.assertFalse(check_file)
@pytest.mark.slow_test
def test_state_show_top(self):
"""
test state.show_top with salt-ssh
@ -154,7 +148,6 @@ class SSHStateTest(SSHCase):
ret = self.run_function("state.show_top")
self.assertEqual(ret, {"base": ["core", "master_tops_test"]})
@pytest.mark.slow_test
def test_state_single(self):
"""
state.single with salt-ssh
@ -172,7 +165,6 @@ class SSHStateTest(SSHCase):
self.assertEqual(value["result"], ret_out["result"])
self.assertEqual(value["comment"], ret_out["comment"])
@pytest.mark.slow_test
def test_show_highstate(self):
"""
state.show_highstate with salt-ssh
@ -202,7 +194,6 @@ class SSHStateTest(SSHCase):
self.assertIn(destpath, high)
self.assertEqual(high[destpath]["__env__"], "base")
@pytest.mark.slow_test
def test_state_high(self):
"""
state.high with salt-ssh
@ -220,7 +211,6 @@ class SSHStateTest(SSHCase):
self.assertEqual(value["result"], ret_out["result"])
self.assertEqual(value["comment"], ret_out["comment"])
@pytest.mark.slow_test
def test_show_lowstate(self):
"""
state.show_lowstate with salt-ssh
@ -248,7 +238,6 @@ class SSHStateTest(SSHCase):
self.assertIsInstance(low, list)
self.assertIsInstance(low[0], dict)
@pytest.mark.slow_test
def test_state_low(self):
"""
state.low with salt-ssh
@ -267,7 +256,6 @@ class SSHStateTest(SSHCase):
self.assertEqual(value["result"], ret_out["result"])
self.assertEqual(value["comment"], ret_out["comment"])
@pytest.mark.slow_test
def test_state_request_check_clear(self):
"""
test state.request system with salt-ssh
@ -281,7 +269,6 @@ class SSHStateTest(SSHCase):
clear = self.run_function("state.clear_request", wipe=False)
self._check_request(empty=True)
@pytest.mark.slow_test
def test_state_run_request(self):
"""
test state.request system with salt-ssh
@ -295,7 +282,6 @@ class SSHStateTest(SSHCase):
check_file = self.run_function("file.file_exists", [SSH_SLS_FILE], wipe=False)
self.assertTrue(check_file)
@pytest.mark.slow_test
def test_state_running(self):
"""
test state.running with salt-ssh

View file

@ -9,6 +9,7 @@ import pytest
pytestmark = [
pytest.mark.skip_on_windows,
pytest.mark.destructive_test,
pytest.mark.timeout_unless_on_windows(240),
pytest.mark.skipif(
'grains["os_family"] == "Suse"',
reason="Zypperpkg module removed as a part of great module migration",

View file

@ -9,6 +9,7 @@ import pytest
pytestmark = [
pytest.mark.skip_on_windows,
pytest.mark.destructive_test,
pytest.mark.timeout_unless_on_windows(240),
pytest.mark.skipif(
'grains["os_family"] == "Suse"',
reason="Zypperpkg module removed as a part of great module migration",

View file

@ -9,6 +9,7 @@ import pytest
pytestmark = [
pytest.mark.skip_on_windows,
pytest.mark.destructive_test,
pytest.mark.timeout_unless_on_windows(240),
pytest.mark.skipif(
'grains["oscodename"] == "Photon"',
reason="vim package not available for this distribution",

View file

@ -1,10 +1,7 @@
import base64
import pytest
import salt.modules.cmdmod as cmdmod
import salt.utils.path
import salt.utils.stringutils
pytestmark = [
pytest.mark.windows_whitelisted,
@ -18,88 +15,197 @@ def shell(request):
This will run the test on powershell and powershell core (pwsh). If
powershell core is not installed, that test run will be skipped
"""
if request.param == "pwsh" and salt.utils.path.which("pwsh") is None:
pytest.skip("Powershell 7 Not Present")
return request.param
def test_powershell(shell):
@pytest.fixture(scope="module")
def account():
with pytest.helpers.create_account() as _account:
yield _account
@pytest.mark.parametrize(
"cmd, expected, encode_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
('Write-Output "Encoded Foo"', "Encoded Foo", True),
(["Write-Output", '"Encoded Foo"'], "Encoded Foo", True),
],
)
def test_powershell(shell, cmd, expected, encode_cmd):
"""
Test cmd.powershell
"""
ret = cmdmod.powershell("Write-Output foo", shell=shell)
assert ret == "foo"
ret = cmdmod.powershell(cmd=cmd, encode_cmd=encode_cmd, shell=shell)
assert ret == expected
def test_powershell_encode_cmd(shell):
@pytest.mark.parametrize(
"cmd, expected, encode_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
('Write-Output "Encoded Foo"', "Encoded Foo", True),
(["Write-Output", '"Encoded Foo"'], "Encoded Foo", True),
],
)
def test_powershell_runas(shell, account, cmd, expected, encode_cmd):
"""
Test cmd.powershell with encode_cmd
Test cmd.powershell with runas
"""
ret = cmdmod.powershell('Write-Output "encoded foo"', encode_cmd=True, shell=shell)
assert ret == "encoded foo"
ret = cmdmod.powershell(
cmd=cmd,
encode_cmd=encode_cmd,
shell=shell,
runas=account.username,
password=account.password,
)
assert ret == expected
def test_powershell_all(shell):
@pytest.mark.parametrize(
"cmd, expected, encode_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
('Write-Output "Encoded Foo"', "Encoded Foo", True),
(["Write-Output", '"Encoded Foo"'], "Encoded Foo", True),
],
)
def test_powershell_all(shell, cmd, expected, encode_cmd):
"""
Test cmd.powershell_all
Test cmd.powershell_all. `encode_cmd` takes the passed command and encodes
it. This differs from encoded_command, which receives an already encoded
command
"""
ret = cmdmod.powershell_all("Write-Output foo", shell=shell)
ret = cmdmod.powershell_all(cmd=cmd, encode_cmd=encode_cmd, shell=shell)
assert isinstance(ret["pid"], int)
assert ret["retcode"] == 0
assert ret["stderr"] == ""
assert ret["result"] == "foo"
assert ret["result"] == expected
def test_powershell_all_encode_cmd(shell):
@pytest.mark.parametrize(
"cmd, expected, encode_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
('Write-Output "Encoded Foo"', "Encoded Foo", True),
(["Write-Output", '"Encoded Foo"'], "Encoded Foo", True),
],
)
def test_powershell_all_runas(shell, account, cmd, expected, encode_cmd):
"""
Test cmd.powershell_all with encode_cmd
Test cmd.powershell_all with runas. `encode_cmd` takes the passed command
and encodes it. This differs from encoded_command, which receives an
already encoded command
"""
ret = cmdmod.powershell_all(
'Write-Output "encoded foo"', encode_cmd=True, shell=shell
cmd=cmd,
encode_cmd=encode_cmd,
shell=shell,
runas=account.username,
password=account.password,
)
assert isinstance(ret["pid"], int)
assert ret["retcode"] == 0
assert ret["stderr"] == ""
assert ret["result"] == "encoded foo"
assert ret["result"] == expected
def test_cmd_run_all_powershell_list():
@pytest.mark.parametrize(
"cmd, expected, encoded_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
(
"VwByAGkAdABlAC0ASABvAHMAdAAgACcARQBuAGMAbwBkAGUAZAAgAEYAbwBvACcA",
"Encoded Foo",
True,
),
],
)
def test_cmd_run_all_powershell(shell, cmd, expected, encoded_cmd):
"""
Ensure that cmd.run_all supports running shell='powershell' with cmd passed
as a list
Ensure that cmd.run_all supports running shell='powershell'
"""
ret = cmdmod.run_all(cmd=cmd, shell=shell, encoded_cmd=encoded_cmd)
assert ret["stdout"] == expected
@pytest.mark.parametrize(
"cmd, expected, encoded_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
(
"VwByAGkAdABlAC0ASABvAHMAdAAgACcARQBuAGMAbwBkAGUAZAAgAEYAbwBvACcA",
"Encoded Foo",
True,
),
],
)
def test_cmd_run_all_powershell_runas(shell, account, cmd, expected, encoded_cmd):
"""
Ensure that cmd.run_all with runas supports running shell='powershell'
"""
ret = cmdmod.run_all(
cmd=["Write-Output", "salt"], python_shell=False, shell="powershell"
cmd=cmd,
shell=shell,
encoded_cmd=encoded_cmd,
runas=account.username,
password=account.password,
)
assert ret["stdout"] == "salt"
assert ret["stdout"] == expected
def test_cmd_run_all_powershell_string():
@pytest.mark.parametrize(
"cmd, expected, encoded_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
(
"VwByAGkAdABlAC0ASABvAHMAdAAgACcARQBuAGMAbwBkAGUAZAAgAEYAbwBvACcA",
"Encoded Foo",
True,
),
],
)
def test_cmd_run_encoded_cmd(shell, cmd, expected, encoded_cmd):
"""
Ensure that cmd.run_all supports running shell='powershell' with cmd passed
as a string
Ensure that cmd.run supports running shell='powershell'
"""
ret = cmdmod.run_all(
cmd="Write-Output salt", python_shell=False, shell="powershell"
ret = cmdmod.run(
cmd=cmd, shell=shell, encoded_cmd=encoded_cmd, redirect_stderr=False
)
assert ret["stdout"] == "salt"
assert ret == expected
def test_cmd_run_encoded_cmd(shell):
cmd = "Write-Output 'encoded command'"
cmd = f"$ProgressPreference='SilentlyContinue'; {cmd}"
cmd_utf16 = cmd.encode("utf-16-le")
encoded_cmd = base64.standard_b64encode(cmd_utf16)
encoded_cmd = salt.utils.stringutils.to_str(encoded_cmd)
ret = cmdmod.run(cmd=encoded_cmd, shell=shell, encoded_cmd=True)
assert ret == "encoded command"
def test_cmd_run_all_encoded_cmd(shell):
cmd = "Write-Output 'encoded command'"
cmd = f"$ProgressPreference='SilentlyContinue'; {cmd}"
cmd_utf16 = cmd.encode("utf-16-le")
encoded_cmd = base64.standard_b64encode(cmd_utf16)
encoded_cmd = salt.utils.stringutils.to_str(encoded_cmd)
ret = cmdmod.run_all(cmd=encoded_cmd, shell=shell, encoded_cmd=True)
assert ret["stdout"] == "encoded command"
@pytest.mark.parametrize(
"cmd, expected, encoded_cmd",
[
("Write-Output Foo", "Foo", False),
(["Write-Output", "Foo"], "Foo", False),
(
"VwByAGkAdABlAC0ASABvAHMAdAAgACcARQBuAGMAbwBkAGUAZAAgAEYAbwBvACcA",
"Encoded Foo",
True,
),
],
)
def test_cmd_run_encoded_cmd_runas(shell, account, cmd, expected, encoded_cmd):
"""
Ensure that cmd.run with runas supports running shell='powershell'
"""
ret = cmdmod.run(
cmd=cmd,
shell=shell,
encoded_cmd=encoded_cmd,
runas=account.username,
password=account.password,
)
assert ret == expected
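For reference, the Base64 strings in the parametrize tables above are produced the same way the removed helper tests built them: encode the command as UTF-16LE, then Base64 it. An illustrative sketch (not part of the change):

import base64

cmd = "Write-Host 'Encoded Foo'"
encoded_cmd = base64.standard_b64encode(cmd.encode("utf-16-le")).decode()
assert encoded_cmd == (
    "VwByAGkAdABlAC0ASABvAHMAdAAgACcARQBuAGMAbwBkAGUAZAAgAEYAbwBvACcA"
)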

View file

@ -0,0 +1,87 @@
import pytest
import salt.utils.path
pytestmark = [
pytest.mark.core_test,
pytest.mark.windows_whitelisted,
]
@pytest.fixture(scope="module")
def cmd(modules):
return modules.cmd
@pytest.fixture(params=["powershell", "pwsh"])
def shell(request):
"""
This will run the test on powershell and powershell core (pwsh). If
powershell core is not installed, that test run will be skipped
"""
if request.param == "pwsh" and salt.utils.path.which("pwsh") is None:
pytest.skip("Powershell 7 Not Present")
return request.param
@pytest.fixture(scope="module")
def account():
with pytest.helpers.create_account() as _account:
yield _account
@pytest.fixture
def issue_56195(state_tree):
contents = """
[CmdLetBinding()]
Param(
[SecureString] $SecureString
)
$Credential = New-Object System.Net.NetworkCredential("DummyId", $SecureString)
$Credential.Password
"""
with pytest.helpers.temp_file("test.ps1", contents, state_tree / "issue-56195"):
yield
@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
def test_windows_script_args_powershell(cmd, shell, issue_56195):
"""
Ensure that powershell processes an inline script with args where the args
contain powershell that needs to be rendered
"""
password = "i like cheese"
args = (
"-SecureString (ConvertTo-SecureString -String '{}' -AsPlainText -Force)"
" -ErrorAction Stop".format(password)
)
script = "salt://issue-56195/test.ps1"
ret = cmd.script(source=script, args=args, shell="powershell", saltenv="base")
assert ret["stdout"] == password
@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
def test_windows_script_args_powershell_runas(cmd, shell, account, issue_56195):
"""
Ensure that powershell processes an inline script with args where the args
contain powershell that needs to be rendered
"""
password = "i like cheese"
args = (
"-SecureString (ConvertTo-SecureString -String '{}' -AsPlainText -Force)"
" -ErrorAction Stop".format(password)
)
script = "salt://issue-56195/test.ps1"
ret = cmd.script(
source=script,
args=args,
shell="powershell",
saltenv="base",
runas=account.username,
password=account.password,
)
assert ret["stdout"] == password

View file

@ -46,17 +46,19 @@ def test_bootstrap(chocolatey, clean):
# if the test fails
result = chocolatey.bootstrap()
# Let's run it outside the try/except to see what the error is
try:
chocolatey_version = chocolatey.chocolatey_version(refresh=True)
except CommandExecutionError:
chocolatey_version = None
chocolatey_version = chocolatey.chocolatey_version(refresh=True)
# try:
# chocolatey_version = chocolatey.chocolatey_version(refresh=True)
# except CommandExecutionError:
# chocolatey_version = None
assert chocolatey_version is not None
def test_bootstrap_version(chocolatey, clean):
chocolatey.bootstrap(version="1.4.0")
try:
chocolatey_version = chocolatey.chocolatey_version(refresh=True)
except CommandExecutionError:
chocolatey_version = None
chocolatey_version = chocolatey.chocolatey_version(refresh=True)
# try:
# chocolatey_version = chocolatey.chocolatey_version(refresh=True)
# except CommandExecutionError:
# chocolatey_version = None
assert chocolatey_version == "1.4.0"

View file

@ -123,7 +123,7 @@ def test_mod_del_repo(grains, modules):
try:
# ppa:otto-kesselgulasch/gimp-edge has no Ubuntu 22.04 repo
if grains["os"] == "Ubuntu" and grains["osmajorrelease"] != 22:
if grains["os"] == "Ubuntu" and grains["osmajorrelease"] < 22:
repo = "ppa:otto-kesselgulasch/gimp-edge"
uri = "http://ppa.launchpad.net/otto-kesselgulasch/gimp-edge/ubuntu"
ret = modules.pkg.mod_repo(repo, "comps=main")
@ -215,11 +215,15 @@ def test_mod_del_repo_multiline_values(modules):
@pytest.mark.requires_salt_modules("pkg.owner")
def test_owner(modules):
def test_owner(modules, grains):
"""
test finding the package owning a file
"""
ret = modules.pkg.owner("/bin/ls")
binary = "/bin/ls"
if grains["os"] == "Ubuntu" and grains["osmajorrelease"] >= 24:
binary = "/usr/bin/ls"
ret = modules.pkg.owner(binary)
assert len(ret) != 0
@ -230,11 +234,10 @@ def test_which(grains, modules):
"""
test finding the package owning a file
"""
if grains["os_family"] in ["Debian", "RedHat"]:
file = "/bin/mknod"
else:
file = "/usr/local/bin/salt-call"
ret = modules.pkg.which(file)
binary = "/bin/ls"
if grains["os"] == "Ubuntu" and grains["osmajorrelease"] >= 24:
binary = "/usr/bin/ls"
ret = modules.pkg.which(binary)
assert len(ret) != 0
@ -401,7 +404,7 @@ def test_pkg_upgrade_has_pending_upgrades(grains, modules):
Test running a system upgrade when there are packages that need upgrading
"""
if grains["os"] == "Arch":
pytest.skipTest("Arch moved to Python 3.8 and we're not ready for it yet")
pytest.skip("Arch moved to Python 3.8 and we're not ready for it yet")
modules.pkg.upgrade()
@ -439,7 +442,7 @@ def test_pkg_upgrade_has_pending_upgrades(grains, modules):
ret = modules.pkg.install(target, version=old)
if not isinstance(ret, dict):
if ret.startswith("ERROR"):
pytest.skipTest(f"Could not install older {target} to complete test.")
pytest.skip(f"Could not install older {target} to complete test.")
# Run a system upgrade, which should catch the fact that the
# targeted package needs upgrading, and upgrade it.
@ -453,7 +456,7 @@ def test_pkg_upgrade_has_pending_upgrades(grains, modules):
else:
ret = modules.pkg.list_upgrades()
if ret == "" or ret == {}:
pytest.skipTest(
pytest.skip(
"No updates available for this machine. Skipping pkg.upgrade test."
)
else:

View file

@ -1,6 +1,7 @@
import datetime
import logging
import os
import shutil
import signal
import subprocess
import textwrap
@ -106,6 +107,9 @@ def hwclock_has_compare(cmdmod):
systems where it's not present so that we can skip the
comparison portion of the test.
"""
hwclock = shutil.which("hwclock")
if hwclock is None:
pytest.skip("The 'hwclock' binary could not be found")
res = cmdmod.run_all(cmd="hwclock -h")
_hwclock_has_compare_ = res["retcode"] == 0 and res["stdout"].find("--compare") > 0
return _hwclock_has_compare_

View file

@ -320,19 +320,22 @@ def test_setpassword_int(user, account_int):
("logonscript", "\\\\server\\script.cmd", "", None),
("expiration_date", "3/19/2024", "", "2024-03-19 00:00:00"),
("expiration_date", "Never", "", None),
("expired", True, "", None),
("expired", False, "", None),
("account_disabled", True, "", None),
("account_disabled", False, "", None),
("unlock_account", True, "account_locked", False),
("password_never_expires", True, "", None),
("password_never_expires", False, "", None),
("expired", True, "", None),
("expired", False, "", None),
("disallow_change_password", True, "", None),
("disallow_change_password", False, "", None),
],
)
def test_update_str(user, value_name, new_value, info_field, expected, account_str):
setting = {value_name: new_value}
# You can't expire an account if the password never expires
if value_name == "expired":
setting.update({"password_never_expires": not new_value})
ret = user.update(account_str.username, **setting)
assert ret is True
ret = user.info(account_str.username)

View file

@ -4,10 +4,11 @@ import os
import pathlib
import shutil
import sys
from sysconfig import get_path
import sysconfig
import attr
import pytest
import requests
import salt.modules.aptpkg
import salt.utils.files
@ -123,7 +124,7 @@ def system_aptsources(request, grains):
"{}".format(*sys.version_info),
"{}.{}".format(*sys.version_info),
]
session_site_packages_dir = get_path(
session_site_packages_dir = sysconfig.get_path(
"purelib"
) # note: platlib and purelib could differ
session_site_packages_dir = os.path.relpath(
@ -649,6 +650,7 @@ class Repo:
key_file = attr.ib()
sources_list_file = attr.ib()
repo_file = attr.ib()
repo_url = attr.ib()
repo_content = attr.ib()
key_url = attr.ib()
@ -686,6 +688,10 @@ class Repo:
def _default_repo_file(self):
return self.sources_list_file
@repo_url.default
def _default_repo_url(self):
return f"https://repo.saltproject.io/py3/{self.fullname}/{self.grains['osrelease']}/{self.grains['osarch']}/latest"
@repo_content.default
def _default_repo_content(self):
if self.alt_repo:
@ -703,25 +709,26 @@ class Repo:
opts = "[arch={arch} signed-by=/usr/share/keyrings/salt-archive-keyring.gpg]".format(
arch=self.grains["osarch"]
)
repo_content = "deb {opts} https://repo.saltproject.io/py3/{}/{}/{arch}/latest {} main".format(
self.fullname,
self.grains["osrelease"],
self.grains["oscodename"],
arch=self.grains["osarch"],
opts=opts,
repo_content = (
f"deb {opts} {self.repo_url} {self.grains['oscodename']} main"
)
return repo_content
@key_url.default
def _default_key_url(self):
key_url = "https://repo.saltproject.io/py3/{}/{}/{}/latest/salt-archive-keyring.gpg".format(
self.fullname, self.grains["osrelease"], self.grains["osarch"]
)
key_url = f"{self.repo_url}/salt-archive-keyring.gpg"
if self.alt_repo:
key_url = "https://artifacts.elastic.co/GPG-KEY-elasticsearch"
return key_url
@property
def exists(self):
"""
Return True if the repository path exists.
"""
response = requests.head(self.key_url, timeout=30)
return response.status_code == 200
@pytest.fixture
def repo(request, grains, sources_list_file):
@ -729,10 +736,14 @@ def repo(request, grains, sources_list_file):
if "signedby" in request.node.name:
signedby = True
repo = Repo(grains=grains, sources_list_file=sources_list_file, signedby=signedby)
yield repo
for key in [repo.key_file, repo.key_file.parent / "salt-alt-key.gpg"]:
if key.is_file():
key.unlink()
if not repo.exists:
pytest.skip(f"The repo url '{repo.repo_url}' does not exist")
try:
yield repo
finally:
for key in [repo.key_file, repo.key_file.parent / "salt-alt-key.gpg"]:
if key.is_file():
key.unlink()
def test_adding_repo_file_signedby(pkgrepo, states, repo, subtests):

View file

@ -11,8 +11,8 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.requires_sshd_server,
pytest.mark.skipif(
"grains['osfinger'] == 'Fedora Linux-39'",
reason="Fedora 39 ships with Python 3.12. Test can't run with system Python on 3.12",
'grains["osfinger"].startswith(("Fedora Linux-40", "Ubuntu-24.04", "Arch Linux"))',
reason="System ships with a version of python that is too recent for salt-ssh tests",
# Actually, the problem is that the tornado we ship is not prepared for Python 3.12,
# and it imports `ssl` and checks if the `match_hostname` function is defined, which
# has been deprecated since Python 3.7, so the logic ends up trying to import

View file

@ -4,10 +4,13 @@ from tests.support.pytest.helpers import reap_stray_processes
@pytest.fixture(scope="package", autouse=True)
def _auto_skip_on_fedora_39(grains):
if grains["osfinger"] == "Fedora Linux-39":
def _auto_skip_on_system_python_too_recent(grains):
if (
grains["osfinger"] in ("Fedora Linux-40", "Ubuntu-24.04")
or grains["os_family"] == "Arch"
):
pytest.skip(
"Fedora 39 ships with Python 3.12. Test can't run with system Python on 3.12"
"System ships with a version of python that is too recent for salt-ssh tests",
# Actually, the problem is that the tornado we ship is not prepared for Python 3.12,
# and it imports `ssl` and checks if the `match_hostname` function is defined, which
# has been deprecated since Python 3.7, so the logic ends up trying to import
@ -16,7 +19,7 @@ def _auto_skip_on_fedora_39(grains):
@pytest.fixture(autouse=True)
def _reap_stray_processes(grains):
def _reap_stray_processes():
# when tests time out, we might leave child processes behind
# nuke them
with reap_stray_processes():

View file

@ -21,8 +21,7 @@ def test_system_config(grains):
"CentOS Stream-8",
"CentOS Linux-8",
"CentOS Stream-9",
"Fedora Linux-36",
"VMware Photon OS-3",
"Fedora Linux-40",
"VMware Photon OS-4",
"VMware Photon OS-5",
"Amazon Linux-2023",

View file

@ -310,7 +310,7 @@ def test_powershell_empty():
mock_run = {"pid": 1234, "retcode": 0, "stderr": "", "stdout": ""}
with patch("salt.modules.cmdmod._run", return_value=mock_run):
ret = cmdmod.powershell("Set-ExecutionPolicy RemoteSigned")
assert ret == {}
assert ret == ""
def test_is_valid_shell_windows():
@ -1052,57 +1052,97 @@ def test_runas_env_sudo_group(bundled):
)
def test_prep_powershell_cmd_no_powershell():
with pytest.raises(CommandExecutionError):
cmdmod._prep_powershell_cmd(
win_shell="unk_bin", cmd="Some-Command", encoded_cmd=False
)
def test_prep_powershell_cmd():
"""
Tests _prep_powershell_cmd returns correct cmd
"""
with patch("salt.utils.platform.is_windows", MagicMock(return_value=False)):
stack = [["", "", ""], ["", "", ""], ["", "", ""]]
stack = [["", "", ""], ["", "", ""], ["", "", ""], ["", "", ""]]
with patch("traceback.extract_stack", return_value=stack), patch(
"salt.utils.path.which", return_value="C:\\powershell.exe"
):
ret = cmdmod._prep_powershell_cmd(
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
win_shell="powershell", cmd="$PSVersionTable", encoded_cmd=False
)
assert ret == 'powershell -NonInteractive -NoProfile -Command "$PSVersionTable"'
expected = [
"C:\\powershell.exe",
"-NonInteractive",
"-NoProfile",
"-ExecutionPolicy",
"Bypass",
"-Command",
"& {$PSVersionTable}",
]
assert ret == expected
def test_prep_powershell_cmd_encoded():
"""
Tests _prep_powershell_cmd returns correct cmd when encoded_cmd=True
"""
stack = [["", "", ""], ["", "", ""], ["", "", ""], ["", "", ""]]
# This is the encoded command for 'Write-Host "Encoded HOLO"'
e_cmd = "VwByAGkAdABlAC0ASABvAHMAdAAgACIARQBuAGMAbwBkAGUAZAAgAEgATwBMAE8AIgA="
with patch("traceback.extract_stack", return_value=stack), patch(
"salt.utils.path.which", return_value="C:\\powershell.exe"
):
ret = cmdmod._prep_powershell_cmd(
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=True
)
assert (
ret
== "powershell -NonInteractive -NoProfile -EncodedCommand $PSVersionTable"
win_shell="powershell", cmd=e_cmd, encoded_cmd=True
)
expected = [
"C:\\powershell.exe",
"-NonInteractive",
"-NoProfile",
"-ExecutionPolicy",
"Bypass",
"-EncodedCommand",
f"{e_cmd}",
]
assert ret == expected
stack = [["", "", ""], ["", "", "script"], ["", "", ""]]
def test_prep_powershell_cmd_script():
"""
Tests _prep_powershell_cmd returns correct cmd when called from cmd.script
"""
stack = [["", "", ""], ["", "", "script"], ["", "", ""], ["", "", ""]]
script = r"C:\some\script.ps1"
with patch("traceback.extract_stack", return_value=stack), patch(
"salt.utils.path.which", return_value="C:\\powershell.exe"
):
ret = cmdmod._prep_powershell_cmd(
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
)
assert (
ret
== "powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command $PSVersionTable"
win_shell="powershell", cmd=script, encoded_cmd=False
)
expected = [
"C:\\powershell.exe",
"-NonInteractive",
"-NoProfile",
"-ExecutionPolicy",
"Bypass",
"-Command",
f"& {script}",
]
assert ret == expected
with patch("salt.utils.platform.is_windows", MagicMock(return_value=True)):
stack = [["", "", ""], ["", "", ""], ["", "", ""]]
ret = cmdmod._prep_powershell_cmd(
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
)
assert (
ret == '"powershell" -NonInteractive -NoProfile -Command "$PSVersionTable"'
)
ret = cmdmod._prep_powershell_cmd(
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=True
)
assert (
ret
== '"powershell" -NonInteractive -NoProfile -EncodedCommand $PSVersionTable'
)
stack = [["", "", ""], ["", "", "script"], ["", "", ""]]
ret = cmdmod._prep_powershell_cmd(
shell="powershell", cmd="$PSVersionTable", stack=stack, encoded_cmd=False
)
assert (
ret
== '"powershell" -NonInteractive -NoProfile -ExecutionPolicy Bypass -Command $PSVersionTable'
)
@pytest.mark.parametrize(
"text, expected",
[
("", '""'), # Should quote an empty string
("Foo", '"Foo"'), # Should quote a string
('["foo", "bar"]', '["foo", "bar"]'), # Should leave unchanged
('{"foo": "bar"}', '{"foo": "bar"}'), # Should leave unchanged
],
)
def test_prep_powershell_json(text, expected):
"""
Make sure the output is valid json
"""
result = cmdmod._prep_powershell_json(text)
assert result == expected
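A minimal sketch of the quoting rule that test table encodes (an assumption about behaviour, not the module's actual implementation): strings that already parse as JSON pass through untouched, everything else gets wrapped in double quotes.

import json

def prep_powershell_json(text):
    # Leave valid JSON alone so structured values survive the PowerShell hop
    try:
        json.loads(text)
    except (TypeError, ValueError):
        return f'"{text}"'
    return text

assert prep_powershell_json("") == '""'
assert prep_powershell_json("Foo") == '"Foo"'
assert prep_powershell_json('["foo", "bar"]') == '["foo", "bar"]'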

View file

@ -4,7 +4,7 @@ import pytest
import salt.modules.kmod as kmod
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
from tests.support.mock import MagicMock, mock_open, patch
@pytest.fixture
@ -17,7 +17,7 @@ def test_available():
Tests return a list of all available kernel modules
"""
with patch("salt.modules.kmod.available", MagicMock(return_value=["kvm"])):
assert ["kvm"] == kmod.available()
assert kmod.available() == ["kvm"]
def test_check_available():
@ -42,7 +42,7 @@ def test_lsmod():
), patch.dict(kmod.__salt__, {"cmd.run": mock_cmd}):
with pytest.raises(CommandExecutionError):
kmod.lsmod()
assert expected == kmod.lsmod()
assert kmod.lsmod() == expected
@pytest.mark.skipif(
@ -55,15 +55,12 @@ def test_mod_list():
with patch(
"salt.modules.kmod._get_modules_conf",
MagicMock(return_value="/etc/modules"),
):
with patch(
"salt.modules.kmod._strip_module_name", MagicMock(return_value="lp")
):
assert ["lp"] == kmod.mod_list(True)
), patch("salt.utils.files.fopen", mock_open(read_data="lp")):
assert kmod.mod_list(True) == ["lp"]
mock_ret = [{"size": 100, "module": None, "depcount": 10, "deps": None}]
with patch("salt.modules.kmod.lsmod", MagicMock(return_value=mock_ret)):
assert [None] == kmod.mod_list(False)
assert kmod.mod_list(False) == [None]
def test_load():
@ -90,10 +87,10 @@ def test_load():
kmod.load(mod, True)
with patch.dict(kmod.__salt__, {"cmd.run_all": mock_run_all_0}):
assert [mod] == kmod.load(mod, True)
assert kmod.load(mod, True) == [mod]
with patch.dict(kmod.__salt__, {"cmd.run_all": mock_run_all_1}):
assert f"Error loading module {mod}: {err_msg}" == kmod.load(mod)
assert kmod.load(mod) == f"Error loading module {mod}: {err_msg}"
def test_is_loaded():
@ -126,11 +123,11 @@ def test_remove():
with pytest.raises(CommandExecutionError):
kmod.remove(mod)
assert [mod] == kmod.remove(mod, True)
assert kmod.remove(mod, True) == [mod]
assert [] == kmod.remove(mod)
assert kmod.remove(mod) == []
with patch.dict(kmod.__salt__, {"cmd.run_all": mock_run_all_1}):
assert "Error removing module {}: {}".format(
mod, err_msg
) == kmod.remove(mod, True)
assert (
kmod.remove(mod, True) == f"Error removing module {mod}: {err_msg}"
)

View file

@ -103,6 +103,26 @@ def test_build_rule():
"comment": "Successfully built rule",
}
assert nftables.build_rule(
table="filter",
chain="input",
family="ip6",
command="insert",
position="3",
full="True",
connstate="related,established",
saddr="::/0",
daddr="fe80:cafe::1",
jump="accept",
) == {
"result": True,
"rule": (
"nft insert rule ip6 filter input position 3 ct state {"
" related,established } ip6 saddr ::/0 ip6 daddr fe80:cafe::1 accept"
),
"comment": "Successfully built rule",
}
assert nftables.build_rule() == {"result": True, "rule": "", "comment": ""}

View file

@ -1,5 +1,5 @@
import datetime
import ipaddress
from datetime import datetime, timedelta, timezone
import pytest
@ -11,6 +11,9 @@ cryptography = pytest.importorskip(
"cryptography", reason="Needs cryptography library", minversion="37.0"
)
cx509 = pytest.importorskip("cryptography.x509", reason="Needs cryptography library")
cprim = pytest.importorskip(
"cryptography.hazmat.primitives", reason="Needs cryptography library"
)
@pytest.fixture
@ -1019,12 +1022,12 @@ class TestCreateExtension:
[
(
"critical, 2022-10-11 13:37:42",
datetime.datetime.strptime("2022-10-11 13:37:42", "%Y-%m-%d %H:%M:%S"),
datetime.strptime("2022-10-11 13:37:42", "%Y-%m-%d %H:%M:%S"),
True,
),
(
"2022-10-11 13:37:42",
datetime.datetime.strptime("2022-10-11 13:37:42", "%Y-%m-%d %H:%M:%S"),
datetime.strptime("2022-10-11 13:37:42", "%Y-%m-%d %H:%M:%S"),
False,
),
],
@ -1875,9 +1878,7 @@ def test_get_dn(inpt, expected):
cx509.Extension(
cx509.InvalidityDate.oid,
value=cx509.InvalidityDate(
datetime.datetime.strptime(
"2022-10-11 13:37:42", "%Y-%m-%d %H:%M:%S"
)
datetime.strptime("2022-10-11 13:37:42", "%Y-%m-%d %H:%M:%S")
),
critical=False,
),
@ -1888,3 +1889,86 @@ def test_get_dn(inpt, expected):
def test_render_extension(inpt, expected):
ret = x509.render_extension(inpt)
assert ret == expected
@pytest.fixture
def ca_cert():
return """\
-----BEGIN CERTIFICATE-----
MIIDODCCAiCgAwIBAgIIbfpgqP0VGPgwDQYJKoZIhvcNAQELBQAwKzELMAkGA1UE
BhMCVVMxDTALBgNVBAMMBFRlc3QxDTALBgNVBAoMBFNhbHQwHhcNMjIxMTE1MTQw
NDMzWhcNMzIxMTEyMTQwNDMzWjArMQswCQYDVQQGEwJVUzENMAsGA1UEAwwEVGVz
dDENMAsGA1UECgwEU2FsdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
AOGTScvrjcEt6vsJcG9RUp6fKaDNDWZnJET0omanK9ZwaoGpJPp8UDYe/8ADeI7N
10wdyB4oDM9gRDjInBtdQO/PsrmKZF6LzqVFgLMxu2up+PHMi9z6B2P4esIAzMu9
PYxc9zH4HzLImHqscVD2HCabsjp9X134Af7hVY5NN/W/4qTP7uOM20wSG2TPI6+B
tA9VyPbEPMPRzXzrqc45rVYe6kb2bT84GE93Vcu/e5JZ/k2AKD8Hoa2cxLPsTLq5
igl+D+k+dfUtiABiKPvVQiYBsD1fyHDn2m7B6pCgvrGqHjsoAKufgFnXy6PJRg7n
vQfaxSiusM5s+VS+fjlvgwsCAwEAAaNgMF4wDwYDVR0TBAgwBgEB/wIBATALBgNV
HQ8EBAMCAQYwHQYDVR0OBBYEFFzy8fRTKSOe7kBakqO0Ki71potnMB8GA1UdIwQY
MBaAFFzy8fRTKSOe7kBakqO0Ki71potnMA0GCSqGSIb3DQEBCwUAA4IBAQBZS4MP
fXYPoGZ66seM+0eikScZHirbRe8vHxHkujnTBUjQITKm86WeQgeBCD2pobgBGZtt
5YFozM4cERqY7/1BdemUxFvPmMFFznt0TM5w+DfGWVK8un6SYwHnmBbnkWgX4Srm
GsL0HHWxVXkGnFGFk6Sbo3vnN7CpkpQTWFqeQQ5rHOw91pt7KnNZwc6I3ZjrCUHJ
+UmKKrga16a4Q+8FBpYdphQU609npo/0zuaE6FyiJYlW3tG+mlbbNgzY/+eUaxt2
9Bp9mtA+Hkox551Mfpq45Oi+ehwMt0xjZCjuFCM78oiUdHCGO+EmcT7ogiYALiOF
LN1w5sybsYwIw6QN
-----END CERTIFICATE-----
"""
@pytest.fixture
def ca_key():
return """\
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEA4ZNJy+uNwS3q+wlwb1FSnp8poM0NZmckRPSiZqcr1nBqgakk
+nxQNh7/wAN4js3XTB3IHigMz2BEOMicG11A78+yuYpkXovOpUWAszG7a6n48cyL
3PoHY/h6wgDMy709jFz3MfgfMsiYeqxxUPYcJpuyOn1fXfgB/uFVjk039b/ipM/u
44zbTBIbZM8jr4G0D1XI9sQ8w9HNfOupzjmtVh7qRvZtPzgYT3dVy797kln+TYAo
PwehrZzEs+xMurmKCX4P6T519S2IAGIo+9VCJgGwPV/IcOfabsHqkKC+saoeOygA
q5+AWdfLo8lGDue9B9rFKK6wzmz5VL5+OW+DCwIDAQABAoIBAFfImc9hu6iR1gAb
jEXFwAE6r1iEc9KGEPdEvG52X/jzhn8u89UGy7BEIAL5VtE8Caz1agtSSqnpLKNs
blO31q18hnDuCmFAxwpKIeuaTvV3EAoJL+Su6HFfIWaeKRSgcHNPOmOXy4xXw/75
XJ/FJu9fZ9ybLaHEAgLObh0Sr9RSPQbZ72ZawPP8+5WCbR+2w90RApHXQL0piSbW
lIx1NE6o5wQb3vik8z/k5FqLCY2a8++WNyfvS+WWFY5WXGI7ZiDDQk46gnslquH2
Lon5CEn3JlTGQFhxaaa2ivssscf2lA2Rvm2E8o1rdZJS2OpSE0ai4TXY9XnyjZj1
5usWIwECgYEA+3Mwu03A7PyLEBksS/u3MSo/176S9lF/uXcecQNdhAIalUZ8AgV3
7HP2yI9ZC0ekA809ZzFjGFostXm9VfUOEZ549jLOMzvBtCdaI0aBUE8icu52fX4r
fT2NY6hYgz5/fxD8sq1XH/fqNNexABwtViH6YAly/9A1/8M3BOWt72UCgYEA5ag8
sIfiBUoWd1sS6qHDuugWlpx4ZWYC/59XEJyCN2wioP8qFji/aNZxF1wLfyQe/zaa
YBFusjsBnSfBU1p4UKCRHWQ9/CnC0DzqTkyKC4Fv8GuxgywNm5W9gPKk7idHP7mw
e+7Uvf1pOQccqEPh7yltpW+Xw27gfsC2DMAIGa8CgYByv/q5P56PiCCeVB6W/mR3
l2RTPLEsn7y+EtJdmL+QgrVG8kedVImJ6tHwbRqhvyvmYD9pXGxwrJZCqy/wjkjB
WaSyFjVrxBV99Yd5Ga/hyntaH+ELHA0UtoZTuHvMSTU9866ei+R6vlSvkM9B0ZoO
+KqeMTG99HLwKVJudbKO0QKBgQCd33U49XBOqoufKSBr4yAmUH2Ws6GgMuxExUiY
xr5NUyzK+B36gLA0ZZYAtOnCURZt4x9kgxdRtnZ5jma74ilrY7XeOpbRzfN6KyX3
BW6wUh6da6rvvUztc5Z+Gk9+18mG6SOFTr04jgfTiCwPD/s06YnSfFAbrRDukZOU
WD45SQKBgBvjSwl3AbPoJnRjZjGuCUMKQKrLm30xCeorxasu+di/4YV5Yd8VUjaO
mYyqXW6bQndKLuXT+AXtCd/Xt2sI96z8mc0G5fImDUxQjMUuS3RyQK357cEOu8Zy
HdI7Pfaf/l0HozAw/Al+LXbpmSBdfmz0U/EGAKRqXMW5+vQ7XHXD
-----END RSA PRIVATE KEY-----"""
def test_build_crl_accounts_for_local_time_zone(ca_key, ca_cert):
curr_time = datetime.now(tz=timezone(timedelta(hours=1)))
curr_time_naive = curr_time.replace(tzinfo=None)
def dtn(tz=None):
if tz is None:
return curr_time_naive
return curr_time
curr_time_utc = curr_time.astimezone(timezone.utc).replace(microsecond=0)
curr_time_utc_naive = curr_time_utc.replace(tzinfo=None)
privkey = cprim.serialization.load_pem_private_key(ca_key.encode(), password=None)
cert = cx509.load_pem_x509_certificate(ca_cert.encode())
with patch("salt.utils.x509.datetime") as fakedate:
fakedate.today.return_value = curr_time_naive
fakedate.now.side_effect = dtn
fakedate.utcnow.return_value = curr_time_utc_naive
builder, _ = x509.build_crl(privkey, [], signing_cert=cert)
crl = builder.sign(privkey, algorithm=cprim.hashes.SHA256())
try:
assert crl.last_update_utc == curr_time_utc
except AttributeError:
assert crl.last_update == curr_time_utc_naive

View file

@ -2,7 +2,6 @@ import pytest
import salt.utils.win_lgpo_netsh as win_lgpo_netsh
from salt.exceptions import CommandExecutionError
from tests.support.mock import patch
pytestmark = [
pytest.mark.windows_whitelisted,
@ -26,18 +25,6 @@ def test_get_settings_firewallpolicy_lgpo():
assert "Outbound" in ret
def test_get_settings_firewallpolicy_lgpo_issue_57591():
"""
Should not stacktrace when the hostname contains unicode characters
"""
with patch.object(win_lgpo_netsh, "__hostname__", return_value="kомпьютер"):
ret = win_lgpo_netsh.get_settings(
profile="domain", section="firewallpolicy", store="lgpo"
)
assert "Inbound" in ret
assert "Outbound" in ret
def test_get_settings_logging_local():
ret = win_lgpo_netsh.get_settings(
profile="domain", section="logging", store="local"
@ -63,7 +50,6 @@ def test_get_settings_settings_local():
assert "InboundUserNotification" in ret
assert "LocalConSecRules" in ret
assert "LocalFirewallRules" in ret
assert "RemoteManagement" in ret
assert "UnicastResponseToMulticast" in ret
@ -74,7 +60,6 @@ def test_get_settings_settings_lgpo():
assert "InboundUserNotification" in ret
assert "LocalConSecRules" in ret
assert "LocalFirewallRules" in ret
assert "RemoteManagement" in ret
assert "UnicastResponseToMulticast" in ret
@ -99,7 +84,6 @@ def test_get_all_settings_local():
assert "InboundUserNotification" in ret
assert "LocalConSecRules" in ret
assert "LocalFirewallRules" in ret
assert "RemoteManagement" in ret
assert "UnicastResponseToMulticast" in ret
assert "State" in ret
@ -115,7 +99,6 @@ def test_get_all_settings_lgpo():
assert "InboundUserNotification" in ret
assert "LocalConSecRules" in ret
assert "LocalFirewallRules" in ret
assert "RemoteManagement" in ret
assert "UnicastResponseToMulticast" in ret
assert "State" in ret
@ -356,7 +339,7 @@ def test_set_firewall_logging_maxfilesize_local():
new = win_lgpo_netsh.get_settings(
profile="domain", section="logging", store="local"
)["MaxFileSize"]
assert new == "16384"
assert new == 16384
finally:
ret = win_lgpo_netsh.set_logging_settings(
profile="domain", setting="maxfilesize", value=current, store="local"
@ -491,32 +474,6 @@ def test_set_firewall_settings_notification_lgpo_notconfigured():
assert ret is True
def test_set_firewall_settings_remotemgmt_local_enable():
current = win_lgpo_netsh.get_settings(
profile="domain", section="settings", store="local"
)["RemoteManagement"]
try:
ret = win_lgpo_netsh.set_settings(
profile="domain",
setting="remotemanagement",
value="enable",
store="local",
)
assert ret is True
new = win_lgpo_netsh.get_settings(
profile="domain", section="settings", store="local"
)["RemoteManagement"]
assert new == "Enable"
finally:
ret = win_lgpo_netsh.set_settings(
profile="domain",
setting="remotemanagement",
value=current,
store="local",
)
assert ret is True
def test_set_firewall_settings_unicast_local_disable():
current = win_lgpo_netsh.get_settings(
profile="domain", section="settings", store="local"
@ -566,13 +523,16 @@ def test_set_firewall_state_local_notconfigured():
profile="domain", section="state", store="local"
)["State"]
try:
pytest.raises(
CommandExecutionError,
win_lgpo_netsh.set_state,
ret = win_lgpo_netsh.set_state(
profile="domain",
state="notconfigured",
store="local",
)
assert ret is True
new = win_lgpo_netsh.get_settings(
profile="domain", section="state", store="local"
)["State"]
assert new == "NotConfigured"
finally:
ret = win_lgpo_netsh.set_state(profile="domain", state=current, store="local")
assert ret is True

View file

@ -330,6 +330,11 @@ class TestAccount:
ret = self.sminion.functions.user.add(self.username)
assert ret is True
self._delete_account = True
if salt.utils.platform.is_windows():
log.debug("Configuring system account: %s", self)
ret = self.sminion.functions.user.update(
self.username, password_never_expires=True
)
if salt.utils.platform.is_darwin() or salt.utils.platform.is_windows():
password = self.password
else:

View file

@ -38,11 +38,6 @@ create = command_group(
_deb_distro_info = {
"debian": {
"10": {
"label": "deb10ary",
"codename": "buster",
"suitename": "oldoldstable",
},
"11": {
"label": "deb11ary",
"codename": "bullseye",
@ -63,9 +58,9 @@ _deb_distro_info = {
"label": "salt_ubuntu2204",
"codename": "jammy",
},
"23.04": {
"label": "salt_ubuntu2304",
"codename": "lunar",
"24.04": {
"label": "salt_ubuntu2404",
"codename": "noble",
},
},
}
@ -316,8 +311,8 @@ def debian(
_rpm_distro_info = {
"amazon": ["2", "2023"],
"redhat": ["7", "8", "9"],
"fedora": ["36", "37", "38", "39"],
"photon": ["3", "4", "5"],
"fedora": ["40"],
"photon": ["4", "5"],
}

View file

@ -361,7 +361,6 @@ MISSING_DOCSTRINGS = {
"machine_get_machinestate_tuple",
],
"salt/utils/win_osinfo.py": ["get_os_version_info"],
"salt/utils/win_runas.py": ["split_username"],
"salt/utils/yamldumper.py": [
"represent_undefined",
"represent_ordereddict",

View file

@ -57,7 +57,7 @@ TEST_SALT_LISTING = PlatformDefinitions(
Linux(slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64"),
Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"),
Linux(slug="debian-12-arm64", display_name="Debian 12 Arm64", arch="arm64"),
Linux(slug="fedora-39", display_name="Fedora 39", arch="x86_64"),
Linux(slug="fedora-40", display_name="Fedora 40", arch="x86_64"),
Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"),
Linux(slug="photonos-4", display_name="Photon OS 4", arch="x86_64"),
Linux(
@ -103,6 +103,12 @@ TEST_SALT_LISTING = PlatformDefinitions(
display_name="Ubuntu 22.04 Arm64",
arch="arm64",
),
Linux(slug="ubuntu-24.04", display_name="Ubuntu 24.04", arch="x86_64"),
Linux(
slug="ubuntu-24.04-arm64",
display_name="Ubuntu 24.04 Arm64",
arch="arm64",
),
],
"macos": [
MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"),
@ -345,6 +351,18 @@ def generate_workflows(ctx: Context):
arch="arm64",
pkg_type="deb",
),
Linux(
slug="ubuntu-24.04",
display_name="Ubuntu 24.04",
arch="x86_64",
pkg_type="deb",
),
Linux(
slug="ubuntu-24.04-arm64",
display_name="Ubuntu 24.04 Arm64",
arch="arm64",
pkg_type="deb",
),
],
"macos": [
MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"),