Merge branch 'master' into remove-azure
commit a34d42b276
38 changed files with 2921 additions and 2817 deletions
30  .github/workflows/ci.yml  vendored
@@ -39,6 +39,7 @@ jobs:
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
steps:
- uses: actions/checkout@v3
with:
@@ -243,6 +244,11 @@ jobs:
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

- name: Get Release Changelog Target
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
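The `set-cache-seed` step above publishes a job output by appending a `key=value` line to the file named by `$GITHUB_OUTPUT`; downstream jobs then read it as `needs.prepare-workflow.outputs.cache-seed`. A minimal sketch of the same mechanism from Python (the `CACHE_SEED` environment variable is an assumption carried over from the workflow's `env` block):

    import os

    def set_step_output(key: str, value: str) -> None:
        # GitHub Actions parses GITHUB_OUTPUT after the step finishes;
        # each "key=value" line becomes an output of the step.
        with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
            fh.write(f"{key}={value}\n")

    # Equivalent to: echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"
    set_step_output("cache-seed", os.environ.get("CACHE_SEED", ""))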
@@ -320,7 +326,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --next-release --template-only
else
tools changelog update-release-notes --template-only
@@ -330,7 +336,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --draft --next-release
tools changelog update-release-notes --next-release
else
@@ -1132,25 +1138,6 @@ jobs:
skip-code-coverage: ${{ github.event_name == 'pull_request' }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}

fedora-36:
name: Fedora 36
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: fedora-36
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: ${{ github.event_name == 'pull_request' }}
skip-junit-reports: ${{ github.event_name == 'pull_request' }}

fedora-37:
name: Fedora 37
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@@ -1349,7 +1336,6 @@ jobs:
- debian-10
- debian-11
- debian-11-arm64
- fedora-36
- fedora-37
- fedora-38
- opensuse-15
30  .github/workflows/nightly.yml  vendored
@@ -92,6 +92,7 @@ jobs:
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
steps:
- uses: actions/checkout@v3
with:
@@ -296,6 +297,11 @@ jobs:
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

- name: Get Release Changelog Target
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@@ -373,7 +379,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --next-release --template-only
else
tools changelog update-release-notes --template-only
@@ -383,7 +389,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --draft --next-release
tools changelog update-release-notes --next-release
else
@@ -1191,25 +1197,6 @@ jobs:
skip-code-coverage: false
skip-junit-reports: false

fedora-36:
name: Fedora 36
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: fedora-36
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

fedora-37:
name: Fedora 37
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@@ -2040,7 +2027,6 @@ jobs:
- debian-10
- debian-11
- debian-11-arm64
- fedora-36
- fedora-37
- fedora-38
- opensuse-15
40  .github/workflows/release.yml  vendored
@@ -505,44 +505,6 @@ jobs:
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

fedora-36-pkg-download-tests:
name: Test Fedora 36 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

fedora-36-arm64-pkg-download-tests:
name: Test Fedora 36 Arm64 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

fedora-37-pkg-download-tests:
name: Test Fedora 37 Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
@@ -818,8 +780,6 @@ jobs:
- debian-10-pkg-download-tests
- debian-11-pkg-download-tests
- debian-11-arm64-pkg-download-tests
- fedora-36-pkg-download-tests
- fedora-36-arm64-pkg-download-tests
- fedora-37-pkg-download-tests
- fedora-37-arm64-pkg-download-tests
- fedora-38-pkg-download-tests
30  .github/workflows/scheduled.yml  vendored
@@ -82,6 +82,7 @@ jobs:
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
steps:
- uses: actions/checkout@v3
with:
@@ -286,6 +287,11 @@ jobs:
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

- name: Get Release Changelog Target
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@@ -363,7 +369,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --next-release --template-only
else
tools changelog update-release-notes --template-only
@@ -373,7 +379,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --draft --next-release
tools changelog update-release-notes --next-release
else
@@ -1175,25 +1181,6 @@ jobs:
skip-code-coverage: false
skip-junit-reports: false

fedora-36:
name: Fedora 36
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: fedora-36
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: false
skip-junit-reports: false

fedora-37:
name: Fedora 37
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@@ -1394,7 +1381,6 @@ jobs:
- debian-10
- debian-11
- debian-11-arm64
- fedora-36
- fedora-37
- fedora-38
- opensuse-15
68  .github/workflows/staging.yml  vendored
@@ -72,6 +72,7 @@ jobs:
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
steps:
- uses: actions/checkout@v3
with:
@@ -282,6 +283,11 @@ jobs:
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

- name: Get Release Changelog Target
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}
pre-commit:
name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
@@ -363,7 +369,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --next-release --template-only
else
tools changelog update-release-notes --template-only
@@ -373,7 +379,7 @@ jobs:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --draft --release --next-release
tools changelog update-release-notes --release --next-release
else
@@ -1181,25 +1187,6 @@ jobs:
skip-code-coverage: true
skip-junit-reports: true

fedora-36:
name: Fedora 36
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-salt-onedir
uses: ./.github/workflows/test-action.yml
with:
distro-slug: fedora-36
nox-session: ci-test-onedir
platform: linux
arch: x86_64
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }}
skip-code-coverage: true
skip-junit-reports: true

fedora-37:
name: Fedora 37
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@@ -2388,42 +2375,6 @@ jobs:
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

fedora-36-pkg-download-tests:
name: Test Fedora 36 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36
platform: linux
arch: x86_64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

fedora-36-arm64-pkg-download-tests:
name: Test Fedora 36 Arm64 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- publish-repositories
uses: ./.github/workflows/test-package-downloads-action-linux.yml
with:
distro-slug: fedora-36-arm64
platform: linux
arch: aarch64
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: staging
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit

fedora-37-pkg-download-tests:
name: Test Fedora 37 Package Downloads
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@@ -2680,7 +2631,6 @@ jobs:
- debian-10
- debian-11
- debian-11-arm64
- fedora-36
- fedora-37
- fedora-38
- opensuse-15
@@ -2725,8 +2675,6 @@ jobs:
- debian-10-pkg-download-tests
- debian-11-pkg-download-tests
- debian-11-arm64-pkg-download-tests
- fedora-36-pkg-download-tests
- fedora-36-arm64-pkg-download-tests
- fedora-37-pkg-download-tests
- fedora-37-arm64-pkg-download-tests
- fedora-38-pkg-download-tests
4  .github/workflows/templates/ci.yml.jinja  vendored
@@ -116,7 +116,7 @@ on:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --next-release --template-only
else
tools changelog update-release-notes --template-only
@@ -126,7 +126,7 @@ on:
shell: bash
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then
if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then
tools changelog update-release-notes --draft <%- if prepare_actual_release %> --release <%- endif %> --next-release
tools changelog update-release-notes <%- if prepare_actual_release %> --release <%- endif %> --next-release
else
7  .github/workflows/templates/layout.yml.jinja  vendored
@@ -91,6 +91,7 @@ jobs:
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }}
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
steps:
- uses: actions/checkout@v3
with:
@@ -305,6 +306,12 @@ jobs:
id: set-cache-seed
run: |
echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

- name: Get Release Changelog Target
id: get-release-changelog-target
run: |
tools ci get-release-changelog-target ${{ github.event_name }}

<%- endblock prepare_workflow_job %>
<%- endif %>
@@ -15,8 +15,6 @@
("debian-10", "Debian 10", "x86_64"),
("debian-11", "Debian 11", "x86_64"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64"),
("fedora-36", "Fedora 36", "x86_64"),
("fedora-36-arm64", "Fedora 36 Arm64", "aarch64"),
("fedora-37", "Fedora 37", "x86_64"),
("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"),
("fedora-38", "Fedora 38", "x86_64"),
@@ -9,7 +9,6 @@
("debian-10", "Debian 10", "x86_64", "deb"),
("debian-11", "Debian 11", "x86_64", "deb"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"),
("fedora-36", "Fedora 36", "x86_64", "rpm"),
("fedora-37", "Fedora 37", "x86_64", "rpm"),
("fedora-38", "Fedora 38", "x86_64", "rpm"),
("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"),
@@ -59,7 +59,6 @@
("debian-10", "Debian 10", "x86_64"),
("debian-11", "Debian 11", "x86_64"),
("debian-11-arm64", "Debian 11 Arm64", "aarch64"),
("fedora-36", "Fedora 36", "x86_64"),
("fedora-37", "Fedora 37", "x86_64"),
("fedora-38", "Fedora 38", "x86_64"),
("opensuse-15", "Opensuse 15", "x86_64"),
@@ -224,7 +224,7 @@ jobs:
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs

- name: Run Package Download Tests
@@ -241,7 +241,7 @@ jobs:
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs

- name: Combine Coverage Reports
@@ -234,7 +234,7 @@ jobs:
run: |
tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs

- name: Run Package Download Tests
@@ -252,7 +252,7 @@ jobs:
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
-E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
-E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs

- name: Combine Coverage Reports
2  changelog/64169.fixed.md  Normal file
@@ -0,0 +1,2 @@
Call global logger when catching pip.list exceptions in states.pip.installed
Rename global logger `log` to `logger` inside pip_state
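Both changelog lines describe one fix, visible in the pip_state hunks further down: the module-global logger is renamed from `log` to `logger`, and the broad `except` around the `pip.list` pre-cache call now logs through it instead of failing silently. A minimal sketch of the pattern (the `list_packages` callable is a hypothetical stand-in for the `pip.list` call):

    import logging

    logger = logging.getLogger(__name__)

    def precache_pip_list(list_packages):
        try:
            return list_packages()
        except Exception as exc:  # pylint: disable=broad-except
            # logger.exception records the message plus the full traceback
            logger.exception("Pre-caching of pip packages failed: %s", exc)
            return False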
1  changelog/64232.fixed.md  Normal file
@@ -0,0 +1 @@
Fixed x509_v2 `create_private_key`/`create_crl` unknown kwargs: __pub_fun...
@@ -1 +1 @@
centosstream-9-x86_64: ami-044545f7a74d46acc
centosstream-9-x86_64: ami-0bd92f4dca5d74017
@@ -1,8 +1,8 @@
{
"almalinux-8-arm64": {
"ami": "ami-0fc1e14bf9ff422aa",
"ami": "ami-05c1d3dbdeeb94bc6",
"ami_description": "CI Image of AlmaLinux 8 arm64",
"ami_name": "salt-project/ci/almalinux/8/arm64/20230418.1731",
"ami_name": "salt-project/ci/almalinux/8/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -10,9 +10,9 @@
"ssh_username": "ec2-user"
},
"almalinux-8": {
"ami": "ami-0bae4158c1f126271",
"ami": "ami-0ec1cbc531f10105b",
"ami_description": "CI Image of AlmaLinux 8 x86_64",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20230418.1732",
"ami_name": "salt-project/ci/almalinux/8/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -20,9 +20,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9-arm64": {
"ami": "ami-08f4d0fbf5d53c3ab",
"ami": "ami-036c495af9dfcf852",
"ami_description": "CI Image of AlmaLinux 9 arm64",
"ami_name": "salt-project/ci/almalinux/9/arm64/20230418.1732",
"ami_name": "salt-project/ci/almalinux/9/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -30,9 +30,9 @@
"ssh_username": "ec2-user"
},
"almalinux-9": {
"ami": "ami-00404c1cc5c5a08bd",
"ami": "ami-0dbc7030666419671",
"ami_description": "CI Image of AlmaLinux 9 x86_64",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20230418.1738",
"ami_name": "salt-project/ci/almalinux/9/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -40,9 +40,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2-arm64": {
"ami": "ami-05fbdb644d06c27b6",
"ami": "ami-022232915c2a5f2d0",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20230418.1717",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20230522.0621",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -50,9 +50,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-014171e6c30ec8387",
"ami": "ami-0695f87baa5b5ce15",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230418.1718",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230522.0620",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -60,9 +60,9 @@
"ssh_username": "ec2-user"
},
"archlinux-lts": {
"ami": "ami-00cff81ed2e2fb0f4",
"ami": "ami-0f6424847f98afc04",
"ami_description": "CI Image of ArchLinux lts x86_64",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20230418.1717",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "false",
"instance_type": "t3a.large",
@@ -70,9 +70,9 @@
"ssh_username": "arch"
},
"centos-7-arm64": {
"ami": "ami-051cef43c13fcc0c9",
"ami": "ami-0908831c364e33a37",
"ami_description": "CI Image of CentOS 7 arm64",
"ami_name": "salt-project/ci/centos/7/arm64/20230418.1743",
"ami_name": "salt-project/ci/centos/7/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -80,9 +80,9 @@
"ssh_username": "centos"
},
"centos-7": {
"ami": "ami-0dcc94e1bea829149",
"ami": "ami-0ace33028ada62ddb",
"ami_description": "CI Image of CentOS 7 x86_64",
"ami_name": "salt-project/ci/centos/7/x86_64/20230418.1743",
"ami_name": "salt-project/ci/centos/7/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -90,9 +90,9 @@
"ssh_username": "centos"
},
"centosstream-8-arm64": {
"ami": "ami-02783136c1080c782",
"ami": "ami-0b30827dc592b2695",
"ami_description": "CI Image of CentOSStream 8 arm64",
"ami_name": "salt-project/ci/centosstream/8/arm64/20230418.1717",
"ami_name": "salt-project/ci/centosstream/8/arm64/20230522.0618",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -100,9 +100,9 @@
"ssh_username": "centos"
},
"centosstream-8": {
"ami": "ami-055e35dc7180defad",
"ami": "ami-0929882a7e5cfba5f",
"ami_description": "CI Image of CentOSStream 8 x86_64",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20230418.1717",
"ami_name": "salt-project/ci/centosstream/8/x86_64/20230522.0618",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -110,9 +110,9 @@
"ssh_username": "centos"
},
"centosstream-9-arm64": {
"ami": "ami-06fd13f7c7c702fc4",
"ami": "ami-00700fb8821b8b8c7",
"ami_description": "CI Image of CentOSStream 9 arm64",
"ami_name": "salt-project/ci/centosstream/9/arm64/20230418.1717",
"ami_name": "salt-project/ci/centosstream/9/arm64/20230522.0619",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -120,9 +120,9 @@
"ssh_username": "ec2-user"
},
"centosstream-9": {
"ami": "ami-044545f7a74d46acc",
"ami": "ami-0bd92f4dca5d74017",
"ami_description": "CI Image of CentOSStream 9 x86_64",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20230418.1717",
"ami_name": "salt-project/ci/centosstream/9/x86_64/20230522.0619",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -130,9 +130,9 @@
"ssh_username": "ec2-user"
},
"debian-10-arm64": {
"ami": "ami-045aedc47e7ddfbf1",
"ami": "ami-0f681fc9d5de0c3df",
"ami_description": "CI Image of Debian 10 arm64",
"ami_name": "salt-project/ci/debian/10/arm64/20230418.1739",
"ami_name": "salt-project/ci/debian/10/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@@ -140,9 +140,9 @@
"ssh_username": "admin"
},
"debian-10": {
"ami": "ami-0a205a9361210b291",
"ami": "ami-0dcf5610590139238",
"ami_description": "CI Image of Debian 10 x86_64",
"ami_name": "salt-project/ci/debian/10/x86_64/20230418.1739",
"ami_name": "salt-project/ci/debian/10/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -150,9 +150,9 @@
"ssh_username": "admin"
},
"debian-11-arm64": {
"ami": "ami-0be71acc27a8efa60",
"ami": "ami-062b4bf11a864825c",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20230418.1739",
"ami_name": "salt-project/ci/debian/11/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@@ -160,39 +160,19 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-0ad354da27b34289b",
"ami": "ami-0f400e5fa6806bbca",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20230418.1742",
"ami_name": "salt-project/ci/debian/11/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "admin"
},
"fedora-36-arm64": {
"ami": "ami-00c0ab2829c887922",
"ami_description": "CI Image of Fedora 36 arm64",
"ami_name": "salt-project/ci/fedora/36/arm64/20230418.1726",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
"is_windows": "false",
"ssh_username": "fedora"
},
"fedora-36": {
"ami": "ami-0185a1189bff7c771",
"ami_description": "CI Image of Fedora 36 x86_64",
"ami_name": "salt-project/ci/fedora/36/x86_64/20230418.1726",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "fedora"
},
"fedora-37-arm64": {
"ami": "ami-075c52fda843ace1b",
"ami": "ami-0d71d6f2b0869842f",
"ami_description": "CI Image of Fedora 37 arm64",
"ami_name": "salt-project/ci/fedora/37/arm64/20230418.1726",
"ami_name": "salt-project/ci/fedora/37/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -200,9 +180,9 @@
"ssh_username": "fedora"
},
"fedora-37": {
"ami": "ami-099a68403d6c65733",
"ami": "ami-026f494dd4b9d40e8",
"ami_description": "CI Image of Fedora 37 x86_64",
"ami_name": "salt-project/ci/fedora/37/x86_64/20230418.1726",
"ami_name": "salt-project/ci/fedora/37/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -210,9 +190,9 @@
"ssh_username": "fedora"
},
"fedora-38-arm64": {
"ami": "ami-02fa22d081a9be052",
"ami": "ami-01ba8a7951daf68fb",
"ami_description": "CI Image of Fedora 38 arm64",
"ami_name": "salt-project/ci/fedora/38/arm64/20230418.1727",
"ami_name": "salt-project/ci/fedora/38/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -220,9 +200,9 @@
"ssh_username": "fedora"
},
"fedora-38": {
"ami": "ami-0a8d949d0bb15bbc0",
"ami": "ami-0699dbe70b69e96aa",
"ami_description": "CI Image of Fedora 38 x86_64",
"ami_name": "salt-project/ci/fedora/38/x86_64/20230418.1727",
"ami_name": "salt-project/ci/fedora/38/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -230,9 +210,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-089ac311f924f131f",
"ami": "ami-0c594da84f6e1cd96",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20230418.1744",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20230522.0619",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -240,9 +220,9 @@
"ssh_username": "ec2-user"
},
"photonos-3": {
"ami": "ami-03ce6db789f90957b",
"ami": "ami-0db2ebdb9bc3400ef",
"ami_description": "CI Image of PhotonOS 3 x86_64",
"ami_name": "salt-project/ci/photonos/3/x86_64/20230418.1717",
"ami_name": "salt-project/ci/photonos/3/x86_64/20230522.0617",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -250,9 +230,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-0ef9996c398479d65",
"ami": "ami-08a6b6bbf6779a538",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20230418.1717",
"ami_name": "salt-project/ci/photonos/4/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -260,9 +240,9 @@
"ssh_username": "root"
},
"ubuntu-20.04-arm64": {
"ami": "ami-0c4d21e0772489c0d",
"ami": "ami-0dccc0de7a38cca90",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230418.1728",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -270,9 +250,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-09ae6200865b29b9b",
"ami": "ami-05e51f893a626b579",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230418.1728",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -280,9 +260,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-024fe5d0b838f88f7",
"ami": "ami-0c958272da6c09ca6",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230418.1731",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230522.0606",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@@ -290,9 +270,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-0d83f00f084d91451",
"ami": "ami-09e45f31ccafcdcec",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230418.1732",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@@ -300,9 +280,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-078d9229cfaf24d1b",
"ami": "ami-099db55543619f54a",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20230418.1717",
"ami_name": "salt-project/ci/windows/2016/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@@ -310,9 +290,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-0ab20823965e1aa7a",
"ami": "ami-0860ee5bc9ee93e13",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20230418.1717",
"ami_name": "salt-project/ci/windows/2019/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@@ -320,9 +300,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-054c4cf04c0f31eb1",
"ami": "ami-032e3abce2aa98da7",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20230418.1717",
"ami_name": "salt-project/ci/windows/2022/x86_64/20230522.0606",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@@ -2,3 +2,5 @@ python_version_linux: "3.10.11"
python_version_macos: "3.10.11"
python_version_windows: "3.10.11"
relenv_version: "0.12.3"
release-branches:
- "3006.x"
10  doc/_themes/saltstack2/layout.html  vendored
@@ -152,7 +152,7 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a href="http://saltstack.com/" target="_blank"><img src="{{ pathto('_static/images/SaltStack_white.svg', 1) }}" class="nolightbox" height="40px" width="170px"></a>
<a href="https://saltproject.io/" target="_blank"><img src="https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/logos/SaltProject_altlogo_teal.png" class="nolightbox" height="44.5px" width="125px"></a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="navbarCollapse">
@@ -263,17 +263,17 @@
<p>You are viewing preview docs for the next major release, {{ next_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p>
{% endif %}
<br>
<p><a href="http://saltstack.com">saltstack.com</a></p>
<p><a href="https://saltproject.io/">saltproject.io</a></p>

<div class="footer">
<p>© {{ copyright }} SaltStack. All Rights Reserved, SaltStack Inc. | <a href="http://saltstack.com/privacy-policy" target="_blank">Privacy Policy</a></p>
<p>© {{ copyright }} VMware, Inc. | <a href="https://www.vmware.com/help/privacy.html" target="_blank">Privacy Policy</a></p>
</div>
</div>
<div class="col-sm-6">
<!--
<a href="https://saltstack.com/saltstack-enterprise/" target="_blank"><img class="nolightbox footer-banner center" src="{{ pathto('_static/images/enterprise_ad.jpg', 1) }}"/></a>
-->
<a href="https://saltconf.com/ad" target="_blank"><img class="nolightbox footer-banner center" src="https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/misc/banners/try_salt_now.png"/></a>
<a href="https://docs.saltproject.io/salt/user-guide/en/latest/index.html" target="_blank"><img class="nolightbox footer-banner center" src="https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/misc/banners/try_salt_now.png"/></a>
</div>
{% endif %}
</div>
@@ -295,7 +295,7 @@
{% else %}
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
{% endif %} #}-->
<a href="https://saltconf.com/menu-ad" target="_blank"><img class="nolightbox sidebar-banner center" src="https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/misc/banners/saltconf.png"/></a>
<a href="https://saltproject.io" target="_blank"><img class="nolightbox sidebar-banner center" src="https://gitlab.com/saltstack/open/salt-branding-guide/-/raw/master/logos/SaltProject_altlogo_teal.png"/></a>

{% if build_type=="next" %}
@@ -2035,7 +2035,6 @@ Valid options:
Top File Settings
=================

These parameters only have an effect if running a masterless minion.

.. conf_minion:: state_top
@@ -596,14 +596,32 @@ class SaltPkgInstall:
self.proc.run("launchctl", "disable", f"system/{service_name}")
self.proc.run("launchctl", "bootout", "system", str(plist_file))
elif upgrade:
env = os.environ.copy()
extra_args = []
if self.distro_id in ("ubuntu", "debian"):
env["DEBIAN_FRONTEND"] = "noninteractive"
extra_args = [
"-o",
"DPkg::Options::=--force-confdef",
"-o",
"DPkg::Options::=--force-confold",
]
log.info("Installing packages:\n%s", pprint.pformat(self.pkgs))
ret = self.proc.run(self.pkg_mngr, "upgrade", "-y", *self.pkgs)
args = extra_args + self.pkgs
ret = self.proc.run(
self.pkg_mngr,
"upgrade",
"-y",
*args,
_timeout=120,
env=env,
)
else:
log.info("Installing packages:\n%s", pprint.pformat(self.pkgs))
ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs)
if not platform.is_darwin() and not platform.is_windows():
# Make sure we don't have any trailing references to old package file locations
assert "No such file or directory" not in ret.stdout
ret.returncode == 0
assert "/saltstack/salt/run" not in ret.stdout
log.info(ret)
self._check_retcode(ret)
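The new upgrade branch above keeps unattended Debian/Ubuntu upgrades from hanging on dpkg conffile prompts: it exports DEBIAN_FRONTEND=noninteractive and passes --force-confdef/--force-confold so configuration-file conflicts are resolved without asking. A standalone sketch of the same invocation, assuming apt-get and using subprocess in place of the test suite's self.proc.run helper:

    import os
    import subprocess

    def upgrade_noninteractive(pkgs):
        env = os.environ.copy()
        # Silence debconf prompts entirely.
        env["DEBIAN_FRONTEND"] = "noninteractive"
        # Resolve conffile conflicts without a prompt: take the default
        # action, and keep the currently installed file as the fallback.
        dpkg_opts = [
            "-o", "DPkg::Options::=--force-confdef",
            "-o", "DPkg::Options::=--force-confold",
        ]
        return subprocess.run(
            ["apt-get", "upgrade", "-y", *dpkg_opts, *pkgs],
            check=True, env=env, timeout=120,
        )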
@@ -901,8 +901,11 @@ def create_crl(
salt.utils.versions.kwargs_warn_until(["text"], "Potassium")
kwargs.pop("text")

if kwargs:
raise SaltInvocationError(f"Unrecognized keyword arguments: {list(kwargs)}")
unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")]
if unknown:
raise SaltInvocationError(
f"Unrecognized keyword arguments: {list(unknown)}"
)

if days_valid is None:
try:
@@ -1235,8 +1238,9 @@ def create_private_key(
for x in ignored_params:
kwargs.pop(x)

if kwargs:
raise SaltInvocationError(f"Unrecognized keyword arguments: {list(kwargs)}")
unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")]
if unknown:
raise SaltInvocationError(f"Unrecognized keyword arguments: {list(unknown)}")

if encoding not in ["der", "pem", "pkcs12"]:
raise CommandExecutionError(
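Both x509 hunks above apply the same fix: Salt injects bookkeeping kwargs such as __pub_fun into CLI invocations, so rejecting every leftover kwarg broke `salt-call x509.create_private_key`. The guard now only rejects names that do not start with an underscore. Condensed, the pattern is:

    from salt.exceptions import SaltInvocationError

    def reject_unknown_kwargs(kwargs):
        # Internal data (__pub_fun, __pub_jid, ...) passes through **kwargs
        # and is tolerated; anything else is a caller typo and fails loudly.
        unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")]
        if unknown:
            raise SaltInvocationError(f"Unrecognized keyword arguments: {unknown}")

    reject_unknown_kwargs({"__pub_fun": "x509.create_crl"})  # accepted
    # reject_unknown_kwargs({"dayz_valid": 7}) would raise SaltInvocationError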
@@ -114,7 +114,7 @@ if HAS_PIP is True:

# pylint: enable=import-error

log = logging.getLogger(__name__)
logger = logging.getLogger(__name__)

# Define the module's virtual name
__virtualname__ = "pip"
@@ -189,10 +189,10 @@ def _check_pkg_version_format(pkg):
# vcs+URL urls are not properly parsed.
# The next line is meant to trigger an AttributeError and
# handle lower pip versions
log.debug("Installed pip version: %s", pip.__version__)
logger.debug("Installed pip version: %s", pip.__version__)
install_req = _from_line(pkg)
except AttributeError:
log.debug("Installed pip version is lower than 1.2")
logger.debug("Installed pip version is lower than 1.2")
supported_vcs = ("git", "svn", "hg", "bzr")
if pkg.startswith(supported_vcs):
for vcs in supported_vcs:
@@ -251,7 +251,7 @@ def _check_if_installed(
index_url,
extra_index_url,
pip_list=False,
**kwargs
**kwargs,
):
"""
Takes a package name and version specification (if any) and checks it is
@@ -351,7 +351,7 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False):
making the comparison.
"""
if HAS_PKG_RESOURCES is False:
log.warning(
logger.warning(
"The pkg_resources packages was not loaded. Please install setuptools."
)
return None
@@ -367,7 +367,9 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False):
if pkg_resources.parse_version(pkg1) > pkg_resources.parse_version(pkg2):
return 1
except Exception as exc:  # pylint: disable=broad-except
log.exception(exc)
logger.exception(
f'Comparison of package versions "{pkg1}" and "{pkg2}" failed: {exc}'
)
return None
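For reference, `_pep440_version_cmp` delegates the ordering itself to `pkg_resources.parse_version`, which implements PEP 440 semantics, so for example `1.10` sorts after `1.9` and pre-releases sort before final releases. A tiny sketch of the comparison it wraps (requires setuptools, as the warning above notes):

    from pkg_resources import parse_version  # shipped with setuptools

    assert parse_version("1.10") > parse_version("1.9")    # numeric, not lexicographic
    assert parse_version("2.0rc1") < parse_version("2.0")  # pre-release sorts before final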
@@ -418,7 +420,7 @@ def installed(
cache_dir=None,
no_binary=None,
extra_args=None,
**kwargs
**kwargs,
):
"""
Make sure the package is installed
@@ -852,7 +854,9 @@ def installed(
)
# If we fail, then just send False, and we'll try again in the next function call
except Exception as exc:  # pylint: disable=broad-except
log.exception(exc)
logger.exception(
f"Pre-caching of PIP packages during states.pip.installed failed by exception from pip.list: {exc}"
)
pip_list = False

for prefix, state_pkg_name, version_spec in pkgs_details:
@@ -872,7 +876,7 @@ def installed(
index_url,
extra_index_url,
pip_list,
**kwargs
**kwargs,
)
# If _check_if_installed result is None, something went wrong with
# the command running. This way we keep stateful output.
@@ -978,7 +982,7 @@ def installed(
no_cache_dir=no_cache_dir,
extra_args=extra_args,
disable_version_check=True,
**kwargs
**kwargs,
)

if pip_install_call and pip_install_call.get("retcode", 1) == 0:
@@ -1043,7 +1047,7 @@ def installed(
user=user,
cwd=cwd,
env_vars=env_vars,
**kwargs
**kwargs,
)
)
@@ -673,6 +673,35 @@ def test_sign_remote_certificate_copypath(x509_salt_call_cli, cert_args, tmp_pat
assert (tmp_path / f"{cert.serial_number:x}.crt").exists()


def test_create_private_key(x509_salt_call_cli):
"""
Ensure calling from the CLI works as expected and does not complain
about unknown internal kwargs (__pub_fun etc).
"""
ret = x509_salt_call_cli.run("x509.create_private_key")
assert ret.returncode == 0
assert ret.data
assert ret.data.startswith("-----BEGIN PRIVATE KEY-----")


def test_create_crl(x509_salt_call_cli, ca_key, ca_cert, x509_pkidir):
"""
Ensure calling from the CLI works as expected and does not complain
about unknown internal kwargs (__pub_fun etc).
"""
with pytest.helpers.temp_file("key", ca_key, x509_pkidir) as ca_keyfile:
with pytest.helpers.temp_file("cert", ca_cert, x509_pkidir) as ca_certfile:
ret = x509_salt_call_cli.run(
"x509.create_crl",
revoked=[],
signing_private_key=str(ca_keyfile),
signing_cert=str(ca_certfile),
)
assert ret.returncode == 0
assert ret.data
assert ret.data.startswith("-----BEGIN X509 CRL-----")


def _belongs_to(cert_or_pubkey, privkey):
if isinstance(cert_or_pubkey, cx509.Certificate):
cert_or_pubkey = cert_or_pubkey.public_key()
538  tests/pytests/unit/states/test_linux_acl.py  Normal file
@@ -0,0 +1,538 @@
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>

Test cases for salt.states.linux_acl
"""

import pytest

import salt.states.linux_acl as linux_acl
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch

pytestmark = [
pytest.mark.skip_unless_on_linux(
reason="Only run on Linux",
)
]


@pytest.fixture
def configure_loader_modules():
return {linux_acl: {}}


def test_present():
"""
Test to ensure a Linux ACL is present
"""
maxDiff = None
name = "/root"
acl_type = "users"
acl_name = "damian"
perms = "rwx"

mock = MagicMock(
side_effect=[
{name: {acl_type: [{acl_name: {"octal": 5}}]}},
{name: {acl_type: [{acl_name: {"octal": 5}}]}},
{name: {acl_type: [{acl_name: {"octal": 5}}]}},
{name: {acl_type: [{}]}},
{name: {acl_type: [{}]}},
{name: {acl_type: [{}]}},
{
name: {acl_type: [{acl_name: {"octal": 7}}]},
name + "/foo": {acl_type: [{acl_name: {"octal": 5}}]},
},
{
name: {acl_type: [{acl_name: {"octal": 7}}]},
name + "/foo": {acl_type: [{acl_name: {"octal": 7}}]},
},
{name: {acl_type: ""}},
{
name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
},
{
name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
},
{
name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
},
]
)
mock_modfacl = MagicMock(return_value=True)

with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Updated permissions will be applied for {}: r-x -> {}".format(
acl_name, perms
)
ret = {
"name": name,
"comment": comt,
"changes": {
"new": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": perms,
},
"old": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": "r-x",
},
},
"result": None,
}

assert linux_acl.present(name, acl_type, acl_name, perms) == ret
# Update - test=False
with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
with patch.dict(linux_acl.__opts__, {"test": False}):
comt = "Updated permissions for {}".format(acl_name)
ret = {
"name": name,
"comment": comt,
"changes": {
"new": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": perms,
},
"old": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": "r-x",
},
},
"result": True,
}
assert linux_acl.present(name, acl_type, acl_name, perms) == ret
# Update - modfacl error
with patch.dict(
linux_acl.__salt__,
{"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))},
):
with patch.dict(linux_acl.__opts__, {"test": False}):
comt = "Error updating permissions for {}: Custom err".format(acl_name)
ret = {
"name": name,
"comment": comt,
"changes": {},
"result": False,
}
assert linux_acl.present(name, acl_type, acl_name, perms) == ret
# New - test=True
with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "New permissions will be applied for {}: {}".format(
acl_name, perms
)
ret = {
"name": name,
"comment": comt,
"changes": {
"new": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": perms,
}
},
"result": None,
}
assert linux_acl.present(name, acl_type, acl_name, perms) == ret
# New - test=False
with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
with patch.dict(linux_acl.__opts__, {"test": False}):
comt = "Applied new permissions for {}".format(acl_name)
ret = {
"name": name,
"comment": comt,
"changes": {
"new": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": perms,
}
},
"result": True,
}
assert linux_acl.present(name, acl_type, acl_name, perms) == ret
# New - modfacl error
with patch.dict(
linux_acl.__salt__,
{"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))},
):
with patch.dict(linux_acl.__opts__, {"test": False}):
comt = "Error updating permissions for {}: Custom err".format(acl_name)
ret = {
"name": name,
"comment": comt,
"changes": {},
"result": False,
}
assert linux_acl.present(name, acl_type, acl_name, perms) == ret

# New - recurse true
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Updated permissions will be applied for {}: rwx -> {}".format(
acl_name, perms
)
ret = {
"name": name,
"comment": comt,
"changes": {
"new": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": perms,
},
"old": {
"acl_name": acl_name,
"acl_type": acl_type,
"perms": "rwx",
},
},
"result": None,
}

assert (
linux_acl.present(name, acl_type, acl_name, perms, recurse=True)
== ret
)

# New - recurse true - nothing to do
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Permissions are in the desired state"
ret = {"name": name, "comment": comt, "changes": {}, "result": True}

assert (
linux_acl.present(name, acl_type, acl_name, perms, recurse=True)
== ret
)

# No acl type
comt = "ACL Type does not exist"
ret = {"name": name, "comment": comt, "result": False, "changes": {}}
assert linux_acl.present(name, acl_type, acl_name, perms) == ret

# default recurse false - nothing to do
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Permissions are in the desired state"
ret = {"name": name, "comment": comt, "changes": {}, "result": True}

assert (
linux_acl.present(
name, "d:" + acl_type, acl_name, perms, recurse=False
)
== ret
)

# default recurse false - nothing to do
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Permissions are in the desired state"
ret = {"name": name, "comment": comt, "changes": {}, "result": True}

assert (
linux_acl.present(
name, "d:" + acl_type, acl_name, perms, recurse=False
)
== ret
)

# default recurse true - nothing to do
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Permissions are in the desired state"
ret = {"name": name, "comment": comt, "changes": {}, "result": True}

assert (
linux_acl.present(
name, "d:" + acl_type, acl_name, perms, recurse=True
)
== ret
)


def test_absent():
"""
Test to ensure a Linux ACL does not exist
"""
name = "/root"
acl_type = "users"
acl_name = "damian"
perms = "rwx"

ret = {"name": name, "result": None, "comment": "", "changes": {}}

mock = MagicMock(
side_effect=[
{name: {acl_type: [{acl_name: {"octal": "A"}}]}},
{name: {acl_type: ""}},
]
)
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Removing permissions"
ret.update({"comment": comt})
assert linux_acl.absent(name, acl_type, acl_name, perms) == ret

comt = "ACL Type does not exist"
ret.update({"comment": comt, "result": False})
assert linux_acl.absent(name, acl_type, acl_name, perms) == ret


def test_list_present():
"""
Test to ensure a Linux ACL is present
"""
name = "/root"
acl_type = "user"
acl_names = ["root", "damian", "homer"]
acl_comment = {"owner": "root", "group": "root", "file": "/root"}
perms = "rwx"

mock = MagicMock(
side_effect=[
{
name: {
acl_type: [
{acl_names[0]: {"octal": "A"}},
{acl_names[1]: {"octal": "A"}},
{acl_names[2]: {"octal": "A"}},
],
"comment": acl_comment,
}
},
{
name: {
acl_type: [
{acl_names[0]: {"octal": "A"}},
{acl_names[1]: {"octal": "A"}},
],
"comment": acl_comment,
}
},
{
name: {
acl_type: [
{acl_names[0]: {"octal": "A"}},
{acl_names[1]: {"octal": "A"}},
]
}
},
{name: {acl_type: [{}]}},
{name: {acl_type: [{}]}},
{name: {acl_type: [{}]}},
{name: {acl_type: ""}},
]
)
mock_modfacl = MagicMock(return_value=True)

with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
# Update - test=True
with patch.dict(linux_acl.__opts__, {"test": True}):
comt = "Updated permissions will be applied for {}: A -> {}".format(
acl_names, perms
)
expected = {
"name": name,
"comment": comt,
"changes": {
"new": {
"acl_name": ", ".join(acl_names),
"acl_type": acl_type,
"perms": 7,
},
"old": {
|
||||
"acl_name": ", ".join(acl_names),
|
||||
"acl_type": acl_type,
|
||||
"perms": "A",
|
||||
},
|
||||
},
|
||||
"result": None,
|
||||
}
|
||||
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert ret == expected
|
||||
|
||||
# Update - test=False
|
||||
with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
|
||||
with patch.dict(linux_acl.__opts__, {"test": False}):
|
||||
comt = "Applied new permissions for {}".format(", ".join(acl_names))
|
||||
expected = {
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"changes": {
|
||||
"new": {
|
||||
"acl_name": ", ".join(acl_names),
|
||||
"acl_type": acl_type,
|
||||
"perms": "rwx",
|
||||
}
|
||||
},
|
||||
"result": True,
|
||||
}
|
||||
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert expected == ret
|
||||
|
||||
# Update - modfacl error
|
||||
with patch.dict(
|
||||
linux_acl.__salt__,
|
||||
{"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))},
|
||||
):
|
||||
with patch.dict(linux_acl.__opts__, {"test": False}):
|
||||
comt = "Error updating permissions for {}: Custom err".format(acl_names)
|
||||
expected = {
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"changes": {},
|
||||
"result": False,
|
||||
}
|
||||
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert expected == ret
|
||||
|
||||
# New - test=True
|
||||
with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
|
||||
with patch.dict(linux_acl.__opts__, {"test": True}):
|
||||
comt = "New permissions will be applied for {}: {}".format(
|
||||
acl_names, perms
|
||||
)
|
||||
expected = {
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"changes": {
|
||||
"new": {
|
||||
"acl_name": ", ".join(acl_names),
|
||||
"acl_type": acl_type,
|
||||
"perms": perms,
|
||||
}
|
||||
},
|
||||
"result": None,
|
||||
}
|
||||
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert expected == ret
|
||||
|
||||
# New - test=False
|
||||
with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
|
||||
with patch.dict(linux_acl.__opts__, {"test": False}):
|
||||
comt = "Applied new permissions for {}".format(", ".join(acl_names))
|
||||
expected = {
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"changes": {
|
||||
"new": {
|
||||
"acl_name": ", ".join(acl_names),
|
||||
"acl_type": acl_type,
|
||||
"perms": perms,
|
||||
}
|
||||
},
|
||||
"result": True,
|
||||
}
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert expected == ret
|
||||
|
||||
# New - modfacl error
|
||||
with patch.dict(
|
||||
linux_acl.__salt__,
|
||||
{"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))},
|
||||
):
|
||||
with patch.dict(linux_acl.__opts__, {"test": False}):
|
||||
comt = "Error updating permissions for {}: Custom err".format(acl_names)
|
||||
expected = {
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"changes": {},
|
||||
"result": False,
|
||||
}
|
||||
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert expected == ret
|
||||
|
||||
# No acl type
|
||||
comt = "ACL Type does not exist"
|
||||
expected = {
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"result": False,
|
||||
"changes": {},
|
||||
}
|
||||
ret = linux_acl.list_present(name, acl_type, acl_names, perms)
|
||||
assert expected == ret
|
||||
|
||||
|
||||
def test_list_absent():
|
||||
"""
|
||||
Test to ensure a Linux ACL does not exist
|
||||
"""
|
||||
name = "/root"
|
||||
acl_type = "users"
|
||||
acl_names = ["damian", "homer"]
|
||||
perms = "rwx"
|
||||
|
||||
ret = {"name": name, "result": None, "comment": "", "changes": {}}
|
||||
|
||||
mock = MagicMock(
|
||||
side_effect=[
|
||||
{
|
||||
name: {
|
||||
acl_type: [
|
||||
{acl_names[0]: {"octal": "A"}, acl_names[1]: {"octal": "A"}}
|
||||
]
|
||||
}
|
||||
},
|
||||
{name: {acl_type: ""}},
|
||||
]
|
||||
)
|
||||
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
|
||||
with patch.dict(linux_acl.__opts__, {"test": True}):
|
||||
comt = "Removing permissions"
|
||||
ret.update({"comment": comt})
|
||||
assert linux_acl.list_absent(name, acl_type, acl_names, perms) == ret
|
||||
|
||||
comt = "ACL Type does not exist"
|
||||
ret.update({"comment": comt, "result": False})
|
||||
assert linux_acl.list_absent(name, acl_type, acl_names) == ret
|
||||
|
||||
|
||||
def test_absent_recursive():
|
||||
"""
|
||||
Test to ensure a Linux ACL does not exist
|
||||
"""
|
||||
name = "/root"
|
||||
acl_type = "users"
|
||||
acl_name = "damian"
|
||||
perms = "rwx"
|
||||
|
||||
ret = {"name": name, "result": None, "comment": "", "changes": {}}
|
||||
|
||||
mock = MagicMock(
|
||||
side_effect=[
|
||||
{
|
||||
name: {acl_type: [{acl_name: {"octal": 7}}]},
|
||||
name + "/foo": {acl_type: [{acl_name: {"octal": "A"}}]},
|
||||
}
|
||||
]
|
||||
)
|
||||
with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
|
||||
with patch.dict(linux_acl.__opts__, {"test": True}):
|
||||
comt = "Removing permissions"
|
||||
ret.update({"comment": comt})
|
||||
assert (
|
||||
linux_acl.absent(name, acl_type, acl_name, perms, recurse=True) == ret
|
||||
)
|
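A note on the "octal" values the mocks above return: the state compares the requested symbolic perms against the octal value reported by acl.getfacl. A minimal illustrative helper, assuming the standard r/w/x weighting (this is a sketch, not Salt's actual implementation):

# Illustrative only: how symbolic perms such as "rwx" relate to the
# octal values ("octal": 5, "octal": 7) used in the mocked getfacl data.
def perms_to_octal(perms: str) -> int:
    weights = {"r": 4, "w": 2, "x": 1}
    return sum(weights[flag] for flag in perms if flag in weights)

assert perms_to_octal("rwx") == 7
assert perms_to_octal("r-x") == 5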
71
tests/pytests/unit/states/test_pip.py
Normal file
@@ -0,0 +1,71 @@
"""
|
||||
:codeauthor: Eric Graham <eric.graham@vantagepnt.com>
|
||||
"""
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.states.pip_state as pip_state
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {pip_state: {"__env__": "base", "__opts__": {"test": False}}}
|
||||
|
||||
|
||||
def test_issue_64169(caplog):
|
||||
pkg_to_install = "nonexistent_package"
|
||||
exception_message = "Invalid JSON (test_issue_64169)"
|
||||
|
||||
mock_pip_list = MagicMock(
|
||||
side_effect=[
|
||||
CommandExecutionError(
|
||||
exception_message
|
||||
), # pre-cache the pip list (preinstall)
|
||||
{}, # Checking if the pkg is already installed
|
||||
{pkg_to_install: "100.10.1"}, # Confirming successful installation
|
||||
]
|
||||
)
|
||||
mock_pip_version = MagicMock(return_value="100.10.1")
|
||||
mock_pip_install = MagicMock(return_value={"retcode": 0, "stdout": ""})
|
||||
|
||||
with patch.dict(
|
||||
pip_state.__salt__,
|
||||
{
|
||||
"pip.list": mock_pip_list,
|
||||
"pip.version": mock_pip_version,
|
||||
"pip.install": mock_pip_install,
|
||||
},
|
||||
):
|
||||
with caplog.at_level(logging.WARNING):
|
||||
# Call pip.installed with a specifically 'broken' pip.list.
|
||||
# pip.installed should continue, but log the exception from pip.list.
|
||||
# pip.installed should NOT raise an exception itself.
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
pip_state.installed(
|
||||
name=pkg_to_install,
|
||||
use_wheel=False, # Set False to simplify testing
|
||||
no_use_wheel=False, # '
|
||||
no_binary=False, # '
|
||||
log=None, # Regression will cause this function call to throw an AttributeError
|
||||
)
|
||||
except AttributeError as exc:
|
||||
# Observed behavior in #64169
|
||||
pytest.fail(
|
||||
"Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeException: "
|
||||
f"{exc}"
|
||||
)
|
||||
|
||||
# Take 64169 further and actually confirm that the exception from pip.list got logged.
|
||||
assert (
|
||||
"Pre-caching of PIP packages during states.pip.installed failed by exception "
|
||||
f"from pip.list: {exception_message}" in caplog.messages
|
||||
)
|
||||
|
||||
# Confirm that the state continued to install the package as expected.
|
||||
# Only check the 'pkgs' parameter of pip.install
|
||||
mock_install_call_args, mock_install_call_kwargs = mock_pip_install.call_args
|
||||
assert mock_install_call_kwargs["pkgs"] == pkg_to_install
|
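For readers unfamiliar with the mocking pattern above: a MagicMock built with side_effect consumes one list entry per call, and raises an entry when it is an exception instance. A small self-contained sketch:

from unittest.mock import MagicMock

mock = MagicMock(side_effect=[ValueError("boom"), {}, {"pkg": "1.0"}])
try:
    mock()  # first call raises the exception instance
except ValueError:
    pass
assert mock() == {}  # second call returns the next entry
assert mock() == {"pkg": "1.0"}  # third call returns the last entry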
@@ -19,6 +19,15 @@ from tests.support.mixins import LoaderModuleMockMixin
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase

pytestmark = [
    pytest.mark.skip_on_windows(
        reason=(
            "Special steps are required for proper SSL validation because "
            "`easy_install` is too old(and deprecated)."
        )
    )
]

KNOWN_VIRTUALENV_BINARY_NAMES = (
    "virtualenv",
    "virtualenv2",
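The module-level pytestmark list added above applies its marks to every test collected from the module. A minimal sketch of the same mechanism using a built-in marker (skip_on_windows is Salt's custom marker; skipif is the stock pytest equivalent):

import sys

import pytest

pytestmark = [pytest.mark.skipif(sys.platform == "win32", reason="example")]


def test_anything():
    # Inherits every mark in the module-level ``pytestmark`` list.
    assert True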
@@ -1,589 +0,0 @@
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""

import pytest

import salt.states.linux_acl as linux_acl
from salt.exceptions import CommandExecutionError
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase


@pytest.mark.skip_unless_on_linux
class LinuxAclTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.states.linux_acl
    """

    def setup_loader_modules(self):
        return {linux_acl: {}}

    # 'present' function tests: 1

    def test_present(self):
        """
        Test to ensure a Linux ACL is present
        """
        self.maxDiff = None
        name = "/root"
        acl_type = "users"
        acl_name = "damian"
        perms = "rwx"

        mock = MagicMock(
            side_effect=[
                {name: {acl_type: [{acl_name: {"octal": 5}}]}},
                {name: {acl_type: [{acl_name: {"octal": 5}}]}},
                {name: {acl_type: [{acl_name: {"octal": 5}}]}},
                {name: {acl_type: [{}]}},
                {name: {acl_type: [{}]}},
                {name: {acl_type: [{}]}},
                {
                    name: {acl_type: [{acl_name: {"octal": 7}}]},
                    name + "/foo": {acl_type: [{acl_name: {"octal": 5}}]},
                },
                {
                    name: {acl_type: [{acl_name: {"octal": 7}}]},
                    name + "/foo": {acl_type: [{acl_name: {"octal": 7}}]},
                },
                {name: {acl_type: ""}},
                {
                    name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
                    name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
                },
                {
                    name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
                    name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
                },
                {
                    name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
                    name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}},
                },
            ]
        )
        mock_modfacl = MagicMock(return_value=True)

        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Updated permissions will be applied for {}: r-x -> {}".format(
                    acl_name, perms
                )
                ret = {
                    "name": name,
                    "comment": comt,
                    "changes": {
                        "new": {
                            "acl_name": acl_name,
                            "acl_type": acl_type,
                            "perms": perms,
                        },
                        "old": {
                            "acl_name": acl_name,
                            "acl_type": acl_type,
                            "perms": "r-x",
                        },
                    },
                    "result": None,
                }

                self.assertDictEqual(
                    linux_acl.present(name, acl_type, acl_name, perms), ret
                )
            # Update - test=False
            with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Updated permissions for {}".format(acl_name)
                    ret = {
                        "name": name,
                        "comment": comt,
                        "changes": {
                            "new": {
                                "acl_name": acl_name,
                                "acl_type": acl_type,
                                "perms": perms,
                            },
                            "old": {
                                "acl_name": acl_name,
                                "acl_type": acl_type,
                                "perms": "r-x",
                            },
                        },
                        "result": True,
                    }
                    self.assertDictEqual(
                        linux_acl.present(name, acl_type, acl_name, perms), ret
                    )
            # Update - modfacl error
            with patch.dict(
                linux_acl.__salt__,
                {
                    "acl.modfacl": MagicMock(
                        side_effect=CommandExecutionError("Custom err")
                    )
                },
            ):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Error updating permissions for {}: Custom err".format(
                        acl_name
                    )
                    ret = {
                        "name": name,
                        "comment": comt,
                        "changes": {},
                        "result": False,
                    }
                    self.assertDictEqual(
                        linux_acl.present(name, acl_type, acl_name, perms), ret
                    )
            # New - test=True
            with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
                with patch.dict(linux_acl.__opts__, {"test": True}):
                    comt = "New permissions will be applied for {}: {}".format(
                        acl_name, perms
                    )
                    ret = {
                        "name": name,
                        "comment": comt,
                        "changes": {
                            "new": {
                                "acl_name": acl_name,
                                "acl_type": acl_type,
                                "perms": perms,
                            }
                        },
                        "result": None,
                    }
                    self.assertDictEqual(
                        linux_acl.present(name, acl_type, acl_name, perms), ret
                    )
            # New - test=False
            with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Applied new permissions for {}".format(acl_name)
                    ret = {
                        "name": name,
                        "comment": comt,
                        "changes": {
                            "new": {
                                "acl_name": acl_name,
                                "acl_type": acl_type,
                                "perms": perms,
                            }
                        },
                        "result": True,
                    }
                    self.assertDictEqual(
                        linux_acl.present(name, acl_type, acl_name, perms), ret
                    )
            # New - modfacl error
            with patch.dict(
                linux_acl.__salt__,
                {
                    "acl.modfacl": MagicMock(
                        side_effect=CommandExecutionError("Custom err")
                    )
                },
            ):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Error updating permissions for {}: Custom err".format(
                        acl_name
                    )
                    ret = {
                        "name": name,
                        "comment": comt,
                        "changes": {},
                        "result": False,
                    }
                    self.assertDictEqual(
                        linux_acl.present(name, acl_type, acl_name, perms), ret
                    )

        # New - recurse true
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = (
                    "Updated permissions will be applied for {}: rwx -> {}".format(
                        acl_name, perms
                    )
                )
                ret = {
                    "name": name,
                    "comment": comt,
                    "changes": {
                        "new": {
                            "acl_name": acl_name,
                            "acl_type": acl_type,
                            "perms": perms,
                        },
                        "old": {
                            "acl_name": acl_name,
                            "acl_type": acl_type,
                            "perms": "rwx",
                        },
                    },
                    "result": None,
                }

                self.assertDictEqual(
                    linux_acl.present(
                        name, acl_type, acl_name, perms, recurse=True
                    ),
                    ret,
                )

        # New - recurse true - nothing to do
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Permissions are in the desired state"
                ret = {"name": name, "comment": comt, "changes": {}, "result": True}

                self.assertDictEqual(
                    linux_acl.present(
                        name, acl_type, acl_name, perms, recurse=True
                    ),
                    ret,
                )

            # No acl type
            comt = "ACL Type does not exist"
            ret = {"name": name, "comment": comt, "result": False, "changes": {}}
            self.assertDictEqual(
                linux_acl.present(name, acl_type, acl_name, perms), ret
            )

        # default recurse false - nothing to do
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Permissions are in the desired state"
                ret = {"name": name, "comment": comt, "changes": {}, "result": True}

                self.assertDictEqual(
                    linux_acl.present(
                        name, "d:" + acl_type, acl_name, perms, recurse=False
                    ),
                    ret,
                )

        # default recurse false - nothing to do
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Permissions are in the desired state"
                ret = {"name": name, "comment": comt, "changes": {}, "result": True}

                self.assertDictEqual(
                    linux_acl.present(
                        name, "d:" + acl_type, acl_name, perms, recurse=False
                    ),
                    ret,
                )

        # default recurse true - nothing to do
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Permissions are in the desired state"
                ret = {"name": name, "comment": comt, "changes": {}, "result": True}

                self.assertDictEqual(
                    linux_acl.present(
                        name, "d:" + acl_type, acl_name, perms, recurse=True
                    ),
                    ret,
                )

    # 'absent' function tests: 2

    def test_absent(self):
        """
        Test to ensure a Linux ACL does not exist
        """
        name = "/root"
        acl_type = "users"
        acl_name = "damian"
        perms = "rwx"

        ret = {"name": name, "result": None, "comment": "", "changes": {}}

        mock = MagicMock(
            side_effect=[
                {name: {acl_type: [{acl_name: {"octal": "A"}}]}},
                {name: {acl_type: ""}},
            ]
        )
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Removing permissions"
                ret.update({"comment": comt})
                self.assertDictEqual(
                    linux_acl.absent(name, acl_type, acl_name, perms), ret
                )

            comt = "ACL Type does not exist"
            ret.update({"comment": comt, "result": False})
            self.assertDictEqual(linux_acl.absent(name, acl_type, acl_name, perms), ret)

    # 'list_present' function tests: 1

    def test_list_present(self):
        """
        Test to ensure a Linux ACL is present
        """
        self.maxDiff = None
        name = "/root"
        acl_type = "user"
        acl_names = ["root", "damian", "homer"]
        acl_comment = {"owner": "root", "group": "root", "file": "/root"}
        perms = "rwx"

        mock = MagicMock(
            side_effect=[
                {
                    name: {
                        acl_type: [
                            {acl_names[0]: {"octal": "A"}},
                            {acl_names[1]: {"octal": "A"}},
                            {acl_names[2]: {"octal": "A"}},
                        ],
                        "comment": acl_comment,
                    }
                },
                {
                    name: {
                        acl_type: [
                            {acl_names[0]: {"octal": "A"}},
                            {acl_names[1]: {"octal": "A"}},
                        ],
                        "comment": acl_comment,
                    }
                },
                {
                    name: {
                        acl_type: [
                            {acl_names[0]: {"octal": "A"}},
                            {acl_names[1]: {"octal": "A"}},
                        ]
                    }
                },
                {name: {acl_type: [{}]}},
                {name: {acl_type: [{}]}},
                {name: {acl_type: [{}]}},
                {name: {acl_type: ""}},
            ]
        )
        mock_modfacl = MagicMock(return_value=True)

        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            # Update - test=True
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Updated permissions will be applied for {}: A -> {}".format(
                    acl_names, perms
                )
                expected = {
                    "name": name,
                    "comment": comt,
                    "changes": {
                        "new": {
                            "acl_name": ", ".join(acl_names),
                            "acl_type": acl_type,
                            "perms": 7,
                        },
                        "old": {
                            "acl_name": ", ".join(acl_names),
                            "acl_type": acl_type,
                            "perms": "A",
                        },
                    },
                    "result": None,
                }

                ret = linux_acl.list_present(name, acl_type, acl_names, perms)
                self.assertDictEqual(ret, expected)

            # Update - test=False
            with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Applied new permissions for {}".format(", ".join(acl_names))
                    expected = {
                        "name": name,
                        "comment": comt,
                        "changes": {
                            "new": {
                                "acl_name": ", ".join(acl_names),
                                "acl_type": acl_type,
                                "perms": "rwx",
                            }
                        },
                        "result": True,
                    }

                    ret = linux_acl.list_present(name, acl_type, acl_names, perms)
                    self.assertDictEqual(expected, ret)

            # Update - modfacl error
            with patch.dict(
                linux_acl.__salt__,
                {
                    "acl.modfacl": MagicMock(
                        side_effect=CommandExecutionError("Custom err")
                    )
                },
            ):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Error updating permissions for {}: Custom err".format(
                        acl_names
                    )
                    expected = {
                        "name": name,
                        "comment": comt,
                        "changes": {},
                        "result": False,
                    }

                    ret = linux_acl.list_present(name, acl_type, acl_names, perms)
                    self.assertDictEqual(expected, ret)

            # New - test=True
            with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
                with patch.dict(linux_acl.__opts__, {"test": True}):
                    comt = "New permissions will be applied for {}: {}".format(
                        acl_names, perms
                    )
                    expected = {
                        "name": name,
                        "comment": comt,
                        "changes": {
                            "new": {
                                "acl_name": ", ".join(acl_names),
                                "acl_type": acl_type,
                                "perms": perms,
                            }
                        },
                        "result": None,
                    }

                    ret = linux_acl.list_present(name, acl_type, acl_names, perms)
                    self.assertDictEqual(expected, ret)

            # New - test=False
            with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Applied new permissions for {}".format(", ".join(acl_names))
                    expected = {
                        "name": name,
                        "comment": comt,
                        "changes": {
                            "new": {
                                "acl_name": ", ".join(acl_names),
                                "acl_type": acl_type,
                                "perms": perms,
                            }
                        },
                        "result": True,
                    }
                    ret = linux_acl.list_present(name, acl_type, acl_names, perms)
                    self.assertDictEqual(expected, ret)

            # New - modfacl error
            with patch.dict(
                linux_acl.__salt__,
                {
                    "acl.modfacl": MagicMock(
                        side_effect=CommandExecutionError("Custom err")
                    )
                },
            ):
                with patch.dict(linux_acl.__opts__, {"test": False}):
                    comt = "Error updating permissions for {}: Custom err".format(
                        acl_names
                    )
                    expected = {
                        "name": name,
                        "comment": comt,
                        "changes": {},
                        "result": False,
                    }

                    ret = linux_acl.list_present(name, acl_type, acl_names, perms)
                    self.assertDictEqual(expected, ret)

            # No acl type
            comt = "ACL Type does not exist"
            expected = {
                "name": name,
                "comment": comt,
                "result": False,
                "changes": {},
            }
            ret = linux_acl.list_present(name, acl_type, acl_names, perms)
            self.assertDictEqual(expected, ret)

    # 'list_absent' function tests: 2

    def test_list_absent(self):
        """
        Test to ensure a Linux ACL does not exist
        """
        name = "/root"
        acl_type = "users"
        acl_names = ["damian", "homer"]
        perms = "rwx"

        ret = {"name": name, "result": None, "comment": "", "changes": {}}

        mock = MagicMock(
            side_effect=[
                {
                    name: {
                        acl_type: [
                            {acl_names[0]: {"octal": "A"}, acl_names[1]: {"octal": "A"}}
                        ]
                    }
                },
                {name: {acl_type: ""}},
            ]
        )
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Removing permissions"
                ret.update({"comment": comt})
                self.assertDictEqual(
                    linux_acl.list_absent(name, acl_type, acl_names, perms), ret
                )

            comt = "ACL Type does not exist"
            ret.update({"comment": comt, "result": False})
            self.assertDictEqual(linux_acl.list_absent(name, acl_type, acl_names), ret)

    def test_absent_recursive(self):
        """
        Test to ensure a Linux ACL does not exist
        """
        name = "/root"
        acl_type = "users"
        acl_name = "damian"
        perms = "rwx"

        ret = {"name": name, "result": None, "comment": "", "changes": {}}

        mock = MagicMock(
            side_effect=[
                {
                    name: {acl_type: [{acl_name: {"octal": 7}}]},
                    name + "/foo": {acl_type: [{acl_name: {"octal": "A"}}]},
                }
            ]
        )
        with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}):
            with patch.dict(linux_acl.__opts__, {"test": True}):
                comt = "Removing permissions"
                ret.update({"comment": comt})
                self.assertDictEqual(
                    linux_acl.absent(name, acl_type, acl_name, perms, recurse=True), ret
                )
@@ -10,6 +10,15 @@ import salt.utils.path
from tests.support.runtests import RUNTIME_VARS
from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base

pytestmark = [
    pytest.mark.skip_on_windows(
        reason=(
            "Special steps are required for proper SSL validation because "
            "`easy_install` is too old(and deprecated)."
        )
    )
]


@pytest.mark.skip_if_binaries_missing(*KNOWN_VIRTUALENV_BINARY_NAMES, check_all=False)
@pytest.mark.requires_network
@@ -8,6 +8,8 @@ ptscripts.register_tools_module("tools.docs")
ptscripts.register_tools_module("tools.pkg")
ptscripts.register_tools_module("tools.pkg.repo")
ptscripts.register_tools_module("tools.pkg.build")
ptscripts.register_tools_module("tools.pkg.repo.create")
ptscripts.register_tools_module("tools.pkg.repo.publish")
ptscripts.register_tools_module("tools.pre_commit")
ptscripts.register_tools_module("tools.release")
ptscripts.register_tools_module("tools.vm")
55
tools/ci.py
@@ -11,6 +11,7 @@ import pathlib
import time
from typing import TYPE_CHECKING

import yaml
from ptscripts import Context, command_group

import tools.utils

@@ -672,3 +673,57 @@ def get_releases(ctx: Context, repository: str = "saltstack/salt"):
        wfh.write(f"latest-release={latest}\n")
        wfh.write(f"releases={json.dumps(str_releases)}\n")
    ctx.exit(0)


@ci.command(
    name="get-release-changelog-target",
    arguments={
        "event_name": {
            "help": "The name of the GitHub event being processed.",
        },
    },
)
def get_release_changelog_target(ctx: Context, event_name: str):
    """
    Define which kind of release notes should be generated, next minor or major.
    """
    gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None
    if gh_event_path is None:
        ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert gh_event_path is not None

    try:
        gh_event = json.loads(open(gh_event_path).read())
    except Exception as exc:
        ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc)
        ctx.exit(1)

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is None:
        ctx.warn("The 'GITHUB_OUTPUT' variable is not set.")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert github_output is not None

    shared_context = yaml.safe_load(
        tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text()
    )
    release_branches = shared_context["release-branches"]

    release_changelog_target = "next-major-release"
    if event_name == "pull_request":
        if gh_event["pull_request"]["base"]["ref"] in release_branches:
            release_changelog_target = "next-minor-release"

    else:
        for branch_name in release_branches:
            if branch_name in gh_event["ref"]:
                release_changelog_target = "next-minor-release"
                break
    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"release-changelog-target={release_changelog_target}\n")
    ctx.exit(0)
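A sketch of the decision this command encodes, with hypothetical event payloads (the release-branches value below is assumed for illustration; the real list comes from the shared workflow context file):

# Hypothetical inputs illustrating get_release_changelog_target's branching.
release_branches = ["3006.x"]  # assumed shape of the shared-context value

pr_event = {"pull_request": {"base": {"ref": "3006.x"}}}
push_event = {"ref": "refs/heads/master"}


def changelog_target(event_name, gh_event):
    target = "next-major-release"
    if event_name == "pull_request":
        if gh_event["pull_request"]["base"]["ref"] in release_branches:
            target = "next-minor-release"
    else:
        for branch_name in release_branches:
            if branch_name in gh_event["ref"]:
                target = "next-minor-release"
                break
    return target


assert changelog_target("pull_request", pr_event) == "next-minor-release"
assert changelog_target("push", push_event) == "next-major-release"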
@@ -30,10 +30,7 @@ build = command_group(


def _get_shared_constants():
    shared_constants = (
        tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml"
    )
    return yaml.safe_load(shared_constants.read_text())
    return yaml.safe_load(tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text())


@build.command(
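The refactor above swaps a locally built path for the shared tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH constant. A sketch of loading such a YAML context (the file body here is an assumed example; only the release-branches key is confirmed by the ci.py hunk above):

import yaml

# Assumed example payload for cicd/shared-gh-workflows-context.yml.
example = """
release-branches:
  - "3006.x"
"""
shared_context = yaml.safe_load(example)
assert "3006.x" in shared_context["release-branches"]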
1906
tools/pkg/repo.py
File diff suppressed because it is too large
181
tools/pkg/repo/__init__.py
Normal file
@@ -0,0 +1,181 @@
"""
These commands are used to build the package repository files.
"""
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations

import logging
import os
import pathlib
import shutil
import sys
from typing import TYPE_CHECKING

from ptscripts import Context, command_group

import tools.pkg
import tools.utils
from tools.utils import Version, get_salt_releases

try:
    import boto3
    from botocore.exceptions import ClientError
except ImportError:
    print(
        "\nPlease run 'python -m pip install -r "
        "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
        file=sys.stderr,
        flush=True,
    )
    raise

log = logging.getLogger(__name__)

# Define the command group
repo = command_group(
    name="repo",
    help="Packaging Repository Related Commands",
    description=__doc__,
    parent="pkg",
)


@repo.command(name="backup-previous-releases")
def backup_previous_releases(ctx: Context):
    """
    Backup release bucket.
    """
    _rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME)
    ctx.info("Done")


@repo.command(name="restore-previous-releases")
def restore_previous_releases(ctx: Context):
    """
    Restore release bucket from backup.
    """
    _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME)
    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is not None:
        with open(github_output, "a", encoding="utf-8") as wfh:
            wfh.write(f"backup-complete=true\n")
    ctx.info("Done")


def _rclone(ctx: Context, src: str, dst: str):
    rclone = shutil.which("rclone")
    if not rclone:
        ctx.error("Could not find the rclone binary")
        ctx.exit(1)

    if TYPE_CHECKING:
        assert rclone

    env = os.environ.copy()
    env["RCLONE_CONFIG_S3_TYPE"] = "s3"
    cmdline: list[str] = [
        rclone,
        "sync",
        "--auto-confirm",
        "--human-readable",
        "--checksum",
        "--color=always",
        "--metadata",
        "--s3-env-auth",
        "--s3-location-constraint=us-west-2",
        "--s3-provider=AWS",
        "--s3-region=us-west-2",
        "--stats-file-name-length=0",
        "--stats-one-line",
        "--stats=5s",
        "--transfers=50",
        "--fast-list",
        "--verbose",
    ]
    if src == tools.utils.RELEASE_BUCKET_NAME:
        cmdline.append("--s3-storage-class=INTELLIGENT_TIERING")
    cmdline.extend([f"s3://{src}", f"s3://{dst}"])
    ctx.info(f"Running: {' '.join(cmdline)}")
    ret = ctx.run(*cmdline, env=env, check=False)
    if ret.returncode:
        ctx.error(f"Failed to sync from s3://{src} to s3://{dst}")
        ctx.exit(1)
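To make _rclone concrete: for a hypothetical source/destination pair, the assembled command line looks roughly like this (bucket names below are placeholders, not the real tools.utils constants, and the flag list is abridged):

# Hypothetical buckets; the real names come from tools.utils constants.
src, dst = "salt-project-release", "salt-project-backup"
cmdline = ["rclone", "sync", "--checksum", "--fast-list"]  # abridged flag list
cmdline.extend([f"s3://{src}", f"s3://{dst}"])
print(" ".join(cmdline))
# rclone sync --checksum --fast-list s3://salt-project-release s3://salt-project-backup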
@repo.command(
    name="confirm-unreleased",
    arguments={
        "salt_version": {
            "help": "The salt version to check",
        },
        "repository": {
            "help": (
                "The full repository name, ie, 'saltstack/salt' on GitHub "
                "to run the checks against."
            )
        },
    },
)
def confirm_unreleased(
    ctx: Context, salt_version: str, repository: str = "saltstack/salt"
):
    """
    Confirm that the passed version is not yet tagged and/or released.
    """
    releases = get_salt_releases(ctx, repository)
    if Version(salt_version) in releases:
        ctx.error(f"There's already a '{salt_version}' tag or github release.")
        ctx.exit(1)
    ctx.info(f"Could not find a release for Salt Version '{salt_version}'")
    ctx.exit(0)


@repo.command(
    name="confirm-staged",
    arguments={
        "salt_version": {
            "help": "The salt version to check",
        },
        "repository": {
            "help": (
                "The full repository name, ie, 'saltstack/salt' on GitHub "
                "to run the checks against."
            )
        },
    },
)
def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"):
    """
    Confirm that the passed version has been staged for release.
    """
    s3 = boto3.client("s3")
    repo_release_files_path = pathlib.Path(
        f"release-artifacts/{salt_version}/.release-files.json"
    )
    repo_release_symlinks_path = pathlib.Path(
        f"release-artifacts/{salt_version}/.release-symlinks.json"
    )
    for remote_path in (repo_release_files_path, repo_release_symlinks_path):
        try:
            bucket_name = tools.utils.STAGING_BUCKET_NAME
            ctx.info(
                f"Checking for the presence of {remote_path} on bucket {bucket_name} ..."
            )
            s3.head_object(
                Bucket=bucket_name,
                Key=str(remote_path),
            )
        except ClientError as exc:
            if "Error" not in exc.response:
                log.exception(f"Could not get information about {remote_path}: {exc}")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "404":
                ctx.error(f"Could not find {remote_path} in bucket.")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "400":
                ctx.error(f"Could get information about {remote_path}: {exc}")
                ctx.exit(1)
            log.exception(f"Error getting information about {remote_path}: {exc}")
            ctx.exit(1)
    ctx.info(f"Version {salt_version} has been staged for release")
    ctx.exit(0)
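The error handling in confirm_staged relies on the shape of botocore's ClientError.response mapping. A minimal sketch of probing that structure (the error is constructed by hand here purely to show the shape; real errors come from boto3 calls):

from botocore.exceptions import ClientError

exc = ClientError({"Error": {"Code": "404", "Message": "Not Found"}}, "HeadObject")
assert exc.response["Error"]["Code"] == "404"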
1038
tools/pkg/repo/create.py
Normal file
File diff suppressed because it is too large
653
tools/pkg/repo/publish.py
Normal file
@@ -0,0 +1,653 @@
"""
These commands are used to build the package repository files.
"""
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations

import fnmatch
import json
import logging
import os
import pathlib
import re
import sys
import tempfile
import textwrap
from typing import TYPE_CHECKING, Any

import packaging.version
from ptscripts import Context, command_group

import tools.pkg
import tools.utils
from tools.utils import (
    Version,
    create_full_repo_path,
    get_repo_json_file_contents,
    get_salt_releases,
    parse_versions,
)

try:
    import boto3
    from botocore.exceptions import ClientError
except ImportError:
    print(
        "\nPlease run 'python -m pip install -r "
        "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
        file=sys.stderr,
        flush=True,
    )
    raise

log = logging.getLogger(__name__)

publish = command_group(
    name="publish",
    help="Packaging Repository Publication Related Commands",
    parent=["pkg", "repo"],
)


@publish.command(
    arguments={
        "repo_path": {
            "help": "Local path for the repository that shall be published.",
        },
        "salt_version": {
            "help": "The salt version of the repository to publish",
            "required": True,
        },
    }
)
def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None):
    """
    Publish to the nightly bucket.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
    _publish_repo(
        ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version
    )


@publish.command(
    arguments={
        "repo_path": {
            "help": "Local path for the repository that shall be published.",
        },
        "salt_version": {
            "help": "The salt version of the repository to publish",
            "required": True,
        },
    }
)
def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None):
    """
    Publish to the staging bucket.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
    _publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version)
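Both wrappers above delegate to _publish_repo (defined further down in this file); the flags select the target bucket. A compact sketch of that mapping, with placeholder bucket names standing in for the tools.utils constants:

# Mirror of the bucket selection at the top of _publish_repo (sketch only).
def pick_bucket(nightly_build=False, stage=False):
    if nightly_build:
        return "RELEASE_BUCKET"
    if stage:
        return "STAGING_BUCKET"
    return "RELEASE_BUCKET"


assert pick_bucket(nightly_build=True) == "RELEASE_BUCKET"
assert pick_bucket(stage=True) == "STAGING_BUCKET"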
@publish.command(
    arguments={
        "salt_version": {
            "help": "The salt version to release.",
        },
    }
)
def release(ctx: Context, salt_version: str):
    """
    Publish to the release bucket.
    """
    if "rc" in salt_version:
        bucket_folder = "salt_rc/salt/py3"
    else:
        bucket_folder = "salt/py3"

    files_to_copy: list[str]
    directories_to_delete: list[str] = []

    ctx.info("Grabbing remote file listing of files to copy...")
    s3 = boto3.client("s3")
    repo_release_files_path = pathlib.Path(
        f"release-artifacts/{salt_version}/.release-files.json"
    )
    repo_release_symlinks_path = pathlib.Path(
        f"release-artifacts/{salt_version}/.release-symlinks.json"
    )
    with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
        local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name
        try:
            bucket_name = tools.utils.STAGING_BUCKET_NAME
            with local_release_files_path.open("wb") as wfh:
                ctx.info(
                    f"Downloading {repo_release_files_path} from bucket {bucket_name} ..."
                )
                s3.download_fileobj(
                    Bucket=bucket_name,
                    Key=str(repo_release_files_path),
                    Fileobj=wfh,
                )
            files_to_copy = json.loads(local_release_files_path.read_text())
        except ClientError as exc:
            if "Error" not in exc.response:
                log.exception(f"Error downloading {repo_release_files_path}: {exc}")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "404":
                ctx.error(f"Could not find {repo_release_files_path} in bucket.")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "400":
                ctx.error(
                    f"Could not download {repo_release_files_path} from bucket: {exc}"
                )
                ctx.exit(1)
            log.exception(f"Error downloading {repo_release_files_path}: {exc}")
            ctx.exit(1)
        local_release_symlinks_path = (
            pathlib.Path(tsd) / repo_release_symlinks_path.name
        )
        try:
            with local_release_symlinks_path.open("wb") as wfh:
                ctx.info(
                    f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..."
                )
                s3.download_fileobj(
                    Bucket=bucket_name,
                    Key=str(repo_release_symlinks_path),
                    Fileobj=wfh,
                )
            directories_to_delete = json.loads(local_release_symlinks_path.read_text())
        except ClientError as exc:
            if "Error" not in exc.response:
                log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "404":
                ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "400":
                ctx.error(
                    f"Could not download {repo_release_symlinks_path} from bucket: {exc}"
                )
                ctx.exit(1)
            log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}")
            ctx.exit(1)

        if directories_to_delete:
            with tools.utils.create_progress_bar() as progress:
                task = progress.add_task(
                    "Deleting directories to override.",
                    total=len(directories_to_delete),
                )
                for directory in directories_to_delete:
                    try:
                        objects_to_delete: list[dict[str, str]] = []
                        for path in _get_repo_file_list(
                            bucket_name=tools.utils.RELEASE_BUCKET_NAME,
                            bucket_folder=bucket_folder,
                            glob_match=f"{directory}/**",
                        ):
                            objects_to_delete.append({"Key": path})
                        if objects_to_delete:
                            s3.delete_objects(
                                Bucket=tools.utils.RELEASE_BUCKET_NAME,
                                Delete={"Objects": objects_to_delete},
                            )
                    except ClientError:
                        log.exception("Failed to delete remote files")
                    finally:
                        progress.update(task, advance=1)

    already_copied_files: list[str] = []
    s3 = boto3.client("s3")
    dot_repo_files = []
    with tools.utils.create_progress_bar() as progress:
        task = progress.add_task(
            "Copying files between buckets", total=len(files_to_copy)
        )
        for fpath in files_to_copy:
            if fpath in already_copied_files:
                continue
            if fpath.endswith(".repo"):
                dot_repo_files.append(fpath)
            ctx.info(f" * Copying {fpath}")
            try:
                s3.copy_object(
                    Bucket=tools.utils.RELEASE_BUCKET_NAME,
                    Key=fpath,
                    CopySource={
                        "Bucket": tools.utils.STAGING_BUCKET_NAME,
                        "Key": fpath,
                    },
                    MetadataDirective="COPY",
                    TaggingDirective="COPY",
                    ServerSideEncryption="AES256",
                )
                already_copied_files.append(fpath)
            except ClientError:
                log.exception(f"Failed to copy {fpath}")
            finally:
                progress.update(task, advance=1)

    # Now let's get the onedir based repositories where we need to update several repo.json
    major_version = packaging.version.parse(salt_version).major
    with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
        repo_path = pathlib.Path(tsd)
        for distro in ("windows", "macos", "onedir"):

            create_repo_path = create_full_repo_path(
                ctx,
                repo_path,
                salt_version,
                distro=distro,
            )
            repo_json_path = create_repo_path.parent.parent / "repo.json"

            release_repo_json = get_repo_json_file_contents(
                ctx,
                bucket_name=tools.utils.RELEASE_BUCKET_NAME,
                repo_path=repo_path,
                repo_json_path=repo_json_path,
            )
            minor_repo_json_path = create_repo_path.parent / "repo.json"

            staging_minor_repo_json = get_repo_json_file_contents(
                ctx,
                bucket_name=tools.utils.STAGING_BUCKET_NAME,
                repo_path=repo_path,
                repo_json_path=minor_repo_json_path,
            )
            release_minor_repo_json = get_repo_json_file_contents(
                ctx,
                bucket_name=tools.utils.RELEASE_BUCKET_NAME,
                repo_path=repo_path,
                repo_json_path=minor_repo_json_path,
            )

            release_json = staging_minor_repo_json[salt_version]

            major_version = Version(salt_version).major
            versions = parse_versions(*list(release_minor_repo_json))
            ctx.info(
                f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: "
                f"{', '.join(str(vs) for vs in versions)}"
            )
            minor_versions = [v for v in versions if v.major == major_version]
            ctx.info(
                f"Collected versions(Matching major: {major_version}) from "
                f"{minor_repo_json_path.relative_to(repo_path)}: "
                f"{', '.join(str(vs) for vs in minor_versions)}"
            )
            if not versions:
                latest_version = Version(salt_version)
            else:
                latest_version = versions[0]
            if not minor_versions:
                latest_minor_version = Version(salt_version)
            else:
                latest_minor_version = minor_versions[0]

            ctx.info(f"Release Version: {salt_version}")
            ctx.info(f"Latest Repo Version: {latest_version}")
            ctx.info(f"Latest Release Minor Version: {latest_minor_version}")

            # Add the minor version
            release_minor_repo_json[salt_version] = release_json

            if latest_version <= salt_version:
                release_repo_json["latest"] = release_json

            if latest_minor_version <= salt_version:
                release_minor_repo_json["latest"] = release_json

            ctx.info(f"Writing {minor_repo_json_path} ...")
            minor_repo_json_path.write_text(
                json.dumps(release_minor_repo_json, sort_keys=True)
            )
            ctx.info(f"Writing {repo_json_path} ...")
            repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True))

        # And now, let's get the several rpm "*.repo" files to update the base
        # domain from staging to release
        release_domain = os.environ.get(
            "SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io"
        )
        for path in dot_repo_files:
            repo_file_path = repo_path.joinpath(path)
            repo_file_path.parent.mkdir(exist_ok=True, parents=True)
            bucket_name = tools.utils.STAGING_BUCKET_NAME
            try:
                ret = s3.head_object(Bucket=bucket_name, Key=path)
                ctx.info(
                    f"Downloading existing '{repo_file_path.relative_to(repo_path)}' "
                    f"file from bucket {bucket_name}"
                )
                size = ret["ContentLength"]
                with repo_file_path.open("wb") as wfh:
                    with tools.utils.create_progress_bar(
                        file_progress=True
                    ) as progress:
                        task = progress.add_task(
                            description="Downloading...", total=size
                        )
                        s3.download_fileobj(
                            Bucket=bucket_name,
                            Key=path,
                            Fileobj=wfh,
                            Callback=tools.utils.UpdateProgress(progress, task),
                        )
                updated_contents = re.sub(
                    r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$",
                    rf"\1=https://{release_domain}/\3",
                    repo_file_path.read_text(),
                    flags=re.MULTILINE,
                )
                ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:")
                ctx.print(updated_contents)
                repo_file_path.write_text(updated_contents)
            except ClientError as exc:
                if "Error" not in exc.response:
                    raise
                if exc.response["Error"]["Code"] != "404":
                    raise
                ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}")

        for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True):
            for path in filenames:
                upload_path = pathlib.Path(dirpath, path)
                relpath = upload_path.relative_to(repo_path)
                size = upload_path.stat().st_size
                ctx.info(f" {relpath}")
                with tools.utils.create_progress_bar(file_progress=True) as progress:
                    task = progress.add_task(description="Uploading...", total=size)
                    s3.upload_file(
                        str(upload_path),
                        tools.utils.RELEASE_BUCKET_NAME,
                        str(relpath),
                        Callback=tools.utils.UpdateProgress(progress, task),
                    )
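The re.sub inside release() rewrites the staging domain in rpm .repo files to the release domain. A worked example with an assumed staging hostname (the pattern is the one from the function above; the file body is hypothetical):

import re

release_domain = "repo.saltproject.io"
# Assumed example of a staged .repo file body.
contents = "baseurl=https://staging.example.com/salt/py3/redhat/9/x86_64/minor/3006.0\n"
updated = re.sub(
    r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$",
    rf"\1=https://{release_domain}/\3",
    contents,
    flags=re.MULTILINE,
)
assert updated.startswith("baseurl=https://repo.saltproject.io/")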
@publish.command(
    arguments={
        "salt_version": {
            "help": "The salt version to release.",
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "repository": {
            "help": (
                "The full repository name, ie, 'saltstack/salt' on GitHub "
                "to run the checks against."
            )
        },
    }
)
def github(
    ctx: Context,
    salt_version: str,
    key_id: str = None,
    repository: str = "saltstack/salt",
):
    """
    Publish the release on GitHub releases.
    """
    if TYPE_CHECKING:
        assert key_id is not None

    s3 = boto3.client("s3")

    # Let's download the release artifacts stored in staging
    artifacts_path = pathlib.Path.cwd() / "release-artifacts"
    artifacts_path.mkdir(exist_ok=True)
    release_artifacts_listing: dict[pathlib.Path, int] = {}
    continuation_token = None
    while True:
        kwargs: dict[str, str] = {}
        if continuation_token:
            kwargs["ContinuationToken"] = continuation_token
        ret = s3.list_objects_v2(
            Bucket=tools.utils.STAGING_BUCKET_NAME,
            Prefix=f"release-artifacts/{salt_version}",
            FetchOwner=False,
            **kwargs,
        )
        contents = ret.pop("Contents", None)
        if contents is None:
            break
        for entry in contents:
            entry_path = pathlib.Path(entry["Key"])
            if entry_path.name.startswith("."):
                continue
            release_artifacts_listing[entry_path] = entry["Size"]
        if not ret["IsTruncated"]:
            break
        continuation_token = ret["NextContinuationToken"]

    for entry_path, size in release_artifacts_listing.items():
        ctx.info(f" * {entry_path.name}")
        local_path = artifacts_path / entry_path.name
        with local_path.open("wb") as wfh:
            with tools.utils.create_progress_bar(file_progress=True) as progress:
                task = progress.add_task(description="Downloading...", total=size)
                s3.download_fileobj(
                    Bucket=tools.utils.STAGING_BUCKET_NAME,
                    Key=str(entry_path),
                    Fileobj=wfh,
                    Callback=tools.utils.UpdateProgress(progress, task),
                )

    for artifact in artifacts_path.iterdir():
        if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"):
            continue
        tools.utils.gpg_sign(ctx, key_id, artifact)

    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, artifacts_path)

    release_message = f"""\
    # Welcome to Salt v{salt_version}

    | :exclamation: ATTENTION                                                                                                  |
    |:-------------------------------------------------------------------------------------------------------------------------|
    | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's version properly.     |
    | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`).
    """
    release_message_path = artifacts_path / "gh-release-body.md"
    release_message_path.write_text(textwrap.dedent(release_message).strip())

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is None:
        ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Stop processing.")
        ctx.exit(0)

    if TYPE_CHECKING:
        assert github_output is not None

    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"release-messsage-file={release_message_path.resolve()}\n")

    releases = get_salt_releases(ctx, repository)
    if Version(salt_version) >= releases[-1]:
        make_latest = True
    else:
        make_latest = False
    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"make-latest={json.dumps(make_latest)}\n")

    artifacts_to_upload = []
    for artifact in artifacts_path.iterdir():
        if artifact.suffix == ".patch":
            continue
        if artifact.name == release_message_path.name:
            continue
        artifacts_to_upload.append(str(artifact.resolve()))

    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n")
    ctx.exit(0)
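The while-loop in github() above is the standard paginated listing pattern for list_objects_v2, and the same idiom appears in _get_repo_detailed_file_list below. Sketched in isolation:

# Generic pagination skeleton for boto3's list_objects_v2 (sketch only).
def iter_keys(s3, bucket, prefix=""):
    kwargs = {}
    while True:
        ret = s3.list_objects_v2(Bucket=bucket, Prefix=prefix, **kwargs)
        for entry in ret.get("Contents", []):
            yield entry["Key"]
        if not ret.get("IsTruncated"):
            break
        kwargs["ContinuationToken"] = ret["NextContinuationToken"]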
def _get_repo_detailed_file_list(
|
||||
bucket_name: str,
|
||||
bucket_folder: str = "",
|
||||
glob_match: str = "**",
|
||||
) -> list[dict[str, Any]]:
|
||||
s3 = boto3.client("s3")
|
||||
listing: list[dict[str, Any]] = []
|
||||
continuation_token = None
|
||||
while True:
|
||||
kwargs: dict[str, str] = {}
|
||||
if continuation_token:
|
||||
kwargs["ContinuationToken"] = continuation_token
|
||||
ret = s3.list_objects_v2(
|
||||
Bucket=bucket_name,
|
||||
Prefix=bucket_folder,
|
||||
FetchOwner=False,
|
||||
**kwargs,
|
||||
)
|
||||
contents = ret.pop("Contents", None)
|
||||
if contents is None:
|
||||
break
|
||||
for entry in contents:
|
||||
if fnmatch.fnmatch(entry["Key"], glob_match):
|
||||
listing.append(entry)
|
||||
if not ret["IsTruncated"]:
|
||||
break
|
||||
continuation_token = ret["NextContinuationToken"]
|
||||
return listing
|
||||
|
||||
|
||||
def _get_repo_file_list(
    bucket_name: str, bucket_folder: str, glob_match: str
) -> list[str]:
    return [
        entry["Key"]
        for entry in _get_repo_detailed_file_list(
            bucket_name, bucket_folder, glob_match=glob_match
        )
    ]
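list_objects_v2 returns at most 1,000 keys per response, so _get_repo_detailed_file_list keeps re-issuing the call with the previous NextContinuationToken until IsTruncated comes back false, filtering keys through fnmatch as it goes; _get_repo_file_list then flattens the entries down to bare key names. A hedged usage sketch (the prefix and glob pattern are illustrative):

# List every repo.json under a hypothetical distro prefix of the
# staging bucket; the helpers page through S3 transparently.
keys = _get_repo_file_list(
    bucket_name=tools.utils.STAGING_BUCKET_NAME,
    bucket_folder="salt/py3/ubuntu",
    glob_match="**/repo.json",
)
for key in keys:
    print(key)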
def _publish_repo(
    ctx: Context,
    repo_path: pathlib.Path,
    salt_version: str,
    nightly_build: bool = False,
    stage: bool = False,
):
    """
    Publish packaging repositories.
    """
    if nightly_build:
        bucket_name = tools.utils.RELEASE_BUCKET_NAME
    elif stage:
        bucket_name = tools.utils.STAGING_BUCKET_NAME
    else:
        bucket_name = tools.utils.RELEASE_BUCKET_NAME

    ctx.info("Preparing upload ...")
    s3 = boto3.client("s3")
    to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {}
    to_upload_paths: list[pathlib.Path] = []
    symlink_paths: list[str] = []
    uploaded_files: list[str] = []
    for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True):
        for dirname in dirnames:
            path = pathlib.Path(dirpath, dirname)
            if not path.is_symlink():
                continue
            # This is a symlink, so we need to delete all files under
            # that directory in S3 because S3 does not understand symlinks
            # and we would end up adding files to that folder instead of
            # replacing it.
            try:
                relpath = path.relative_to(repo_path)
                ret = s3.list_objects(
                    Bucket=bucket_name,
                    Prefix=str(relpath),
                )
                if "Contents" not in ret:
                    continue
                objects = []
                for entry in ret["Contents"]:
                    objects.append({"Key": entry["Key"]})
                to_delete_paths[path] = objects
                symlink_paths.append(str(relpath))
            except ClientError as exc:
                if "Error" not in exc.response:
                    raise
                if exc.response["Error"]["Code"] != "404":
                    raise

        for fpath in filenames:
            path = pathlib.Path(dirpath, fpath)
            to_upload_paths.append(path)

    with tools.utils.create_progress_bar() as progress:
        task = progress.add_task(
            "Deleting directories to overwrite.", total=len(to_delete_paths)
        )
        for base, objects in to_delete_paths.items():
            relpath = base.relative_to(repo_path)
            bucket_uri = f"s3://{bucket_name}/{relpath}"
            progress.update(task, description=f"Deleting {bucket_uri}")
            try:
                ret = s3.delete_objects(
                    Bucket=bucket_name,
                    Delete={"Objects": objects},
                )
            except ClientError:
                log.exception(f"Failed to delete {bucket_uri}")
            finally:
                progress.update(task, advance=1)

    try:
        ctx.info("Uploading repository ...")
        for upload_path in to_upload_paths:
            relpath = upload_path.relative_to(repo_path)
            size = upload_path.stat().st_size
            ctx.info(f"  {relpath}")
            with tools.utils.create_progress_bar(file_progress=True) as progress:
                task = progress.add_task(description="Uploading...", total=size)
                s3.upload_file(
                    str(upload_path),
                    bucket_name,
                    str(relpath),
                    Callback=tools.utils.UpdateProgress(progress, task),
                    ExtraArgs={
                        "Metadata": {
                            "x-amz-meta-salt-release-version": salt_version,
                        }
                    },
                )
            uploaded_files.append(str(relpath))
        if stage is True:
            repo_files_path = f"release-artifacts/{salt_version}/.release-files.json"
            ctx.info(f"Uploading {repo_files_path} ...")
            s3.put_object(
                Key=repo_files_path,
                Bucket=bucket_name,
                Body=json.dumps(uploaded_files).encode(),
                Metadata={
                    "x-amz-meta-salt-release-version": salt_version,
                },
            )
            repo_symlinks_path = (
                f"release-artifacts/{salt_version}/.release-symlinks.json"
            )
            ctx.info(f"Uploading {repo_symlinks_path} ...")
            s3.put_object(
                Key=repo_symlinks_path,
                Bucket=bucket_name,
                Body=json.dumps(symlink_paths).encode(),
                Metadata={
                    "x-amz-meta-salt-release-version": salt_version,
                },
            )
    except KeyboardInterrupt:
        pass
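S3 has no notion of symlinks: walking with followlinks=True simply re-uploads the link target's files under the link's prefix, so any keys already sitting under that prefix must be deleted first, which is what the to_delete_paths bookkeeping above does. One caveat worth noting: delete_objects accepts at most 1,000 keys per request, so a very large prefix would need batching along these lines (a hedged sketch; the helper name is made up):

def _delete_objects_in_batches(s3, bucket_name, objects):
    # Hypothetical helper: split the listing into the 1,000-key
    # batches that the S3 DeleteObjects API accepts per call.
    for i in range(0, len(objects), 1000):
        s3.delete_objects(
            Bucket=bucket_name,
            Delete={"Objects": objects[i : i + 1000]},
        )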
@@ -116,10 +116,9 @@ def generate_workflows(ctx: Context):
        "prepare_workflow_needs": NeedsTracker(),
        "build_repo_needs": NeedsTracker(),
    }
-   shared_context_file = (
-       tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml"
-   )
-   shared_context = yaml.safe_load(shared_context_file.read_text())
+   shared_context = yaml.safe_load(
+       tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text()
+   )
    for key, value in shared_context.items():
        context[key] = value
    loaded_template = env.get_template(template_path.name)

130 tools/utils.py

@@ -1,8 +1,12 @@
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations

import json
import os
import pathlib
import sys
from datetime import datetime
from typing import Any

import packaging.version
from ptscripts import Context

@@ -16,12 +20,27 @@ from rich.progress import (
    TransferSpeedColumn,
)

try:
    import boto3
    from botocore.exceptions import ClientError
except ImportError:
    print(
        "\nPlease run 'python -m pip install -r "
        "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
        file=sys.stderr,
        flush=True,
    )
    raise

REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod"
STAGING_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-staging"
RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release"
BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup"
SHARED_WORKFLOW_CONTEXT_FILEPATH = (
    REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml"
)


class UpdateProgress:
@@ -169,3 +188,114 @@ def get_salt_releases(ctx: Context, repository: str) -> list[Version]:
        # We're not going to parse dash or docs releases
        versions.add(Version(name))
    return sorted(versions)


def parse_versions(*versions: str) -> list[Version]:
    _versions = []
    for version in set(versions):
        if version == "latest":
            continue
        _versions.append(Version(version))
    if _versions:
        _versions.sort(reverse=True)
    return _versions
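parse_versions deduplicates its arguments, silently drops the literal "latest", and hands back the remainder newest-first. A quick hedged usage sketch:

# Illustrative call: duplicates collapse and "latest" is ignored.
print(parse_versions("3006.1", "3005.2", "latest", "3006.1"))
# -> [<Version('3006.1')>, <Version('3005.2')>]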
def get_repo_json_file_contents(
    ctx: Context,
    bucket_name: str,
    repo_path: pathlib.Path,
    repo_json_path: pathlib.Path,
) -> dict[str, Any]:
    s3 = boto3.client("s3")
    repo_json: dict[str, Any] = {}
    try:
        ret = s3.head_object(
            Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path))
        )
        ctx.info(
            f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file "
            f"from bucket {bucket_name}"
        )
        size = ret["ContentLength"]
        with repo_json_path.open("wb") as wfh:
            with create_progress_bar(file_progress=True) as progress:
                task = progress.add_task(description="Downloading...", total=size)
                s3.download_fileobj(
                    Bucket=bucket_name,
                    Key=str(repo_json_path.relative_to(repo_path)),
                    Fileobj=wfh,
                    Callback=UpdateProgress(progress, task),
                )
        with repo_json_path.open() as rfh:
            repo_json = json.load(rfh)
    except ClientError as exc:
        if "Error" not in exc.response:
            raise
        if exc.response["Error"]["Code"] != "404":
            raise
        ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}")
    if repo_json:
        ctx.print(repo_json, soft_wrap=True)
    return repo_json
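The function uses head_object as a cheap existence probe: a missing key surfaces as a ClientError whose error code is "404", which is treated as "no existing file, start from an empty dict", while any other failure re-raises. The same probe in isolation might look like this (a hedged sketch, not part of tools/utils.py):

def s3_key_exists(s3, bucket: str, key: str) -> bool:
    # Hedged sketch: HeadObject raises ClientError with code "404"
    # when the key is absent; anything else is a real error.
    try:
        s3.head_object(Bucket=bucket, Key=key)
        return True
    except ClientError as exc:
        if exc.response.get("Error", {}).get("Code") != "404":
            raise
        return False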
def create_top_level_repo_path(
    ctx: Context,
    repo_path: pathlib.Path,
    salt_version: str,
    distro: str,
    distro_version: str | None = None,  # pylint: disable=bad-whitespace
    distro_arch: str | None = None,  # pylint: disable=bad-whitespace
    nightly_build_from: str | None = None,  # pylint: disable=bad-whitespace
):
    create_repo_path = repo_path
    if nightly_build_from:
        create_repo_path = (
            create_repo_path
            / "salt-dev"
            / nightly_build_from
            / datetime.utcnow().strftime("%Y-%m-%d")
        )
        create_repo_path.mkdir(exist_ok=True, parents=True)
        with ctx.chdir(create_repo_path.parent):
            latest_nightly_symlink = pathlib.Path("latest")
            if not latest_nightly_symlink.exists():
                ctx.info(
                    f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..."
                )
                latest_nightly_symlink.symlink_to(
                    create_repo_path.name, target_is_directory=True
                )
    elif "rc" in salt_version:
        create_repo_path = create_repo_path / "salt_rc"
    create_repo_path = create_repo_path / "salt" / "py3" / distro
    if distro_version:
        create_repo_path = create_repo_path / distro_version
    if distro_arch:
        create_repo_path = create_repo_path / distro_arch
    create_repo_path.mkdir(exist_ok=True, parents=True)
    return create_repo_path

def create_full_repo_path(
    ctx: Context,
    repo_path: pathlib.Path,
    salt_version: str,
    distro: str,
    distro_version: str | None = None,  # pylint: disable=bad-whitespace
    distro_arch: str | None = None,  # pylint: disable=bad-whitespace
    nightly_build_from: str | None = None,  # pylint: disable=bad-whitespace
):
    create_repo_path = create_top_level_repo_path(
        ctx,
        repo_path,
        salt_version,
        distro,
        distro_version,
        distro_arch,
        nightly_build_from=nightly_build_from,
    )
    create_repo_path = create_repo_path / "minor" / salt_version
    create_repo_path.mkdir(exist_ok=True, parents=True)
    return create_repo_path
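Together the two helpers encode the bucket layout: an optional salt_rc/ prefix for release candidates, then salt/py3/<distro>[/<distro_version>][/<distro_arch>], and finally minor/<salt_version> for the versioned tree. A hedged sketch of the resulting path (the argument values are illustrative, and ctx is assumed to come from the surrounding tooling):

import pathlib

# Hypothetical invocation; the distro/version/arch values are made up.
path = create_full_repo_path(
    ctx,
    pathlib.Path("/tmp/repo"),
    "3006.1",
    "ubuntu",
    distro_version="22.04",
    distro_arch="amd64",
)
print(path)  # /tmp/repo/salt/py3/ubuntu/22.04/amd64/minor/3006.1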
75 tools/vm.py
@@ -720,41 +720,50 @@ class VM:
        client = boto3.client("ec2", region_name=self.region_name)
        # Let's search for the launch template corresponding to this AMI
        launch_template_name = None
+       next_token = ""
        try:
-           response = client.describe_launch_templates(
-               Filters=[
-                   {
-                       "Name": "tag:spb:is-golden-image-template",
-                       "Values": ["true"],
-                   },
-                   {
-                       "Name": "tag:spb:project",
-                       "Values": ["salt-project"],
-                   },
-                   {
-                       "Name": "tag:spb:environment",
-                       "Values": [environment],
-                   },
-                   {
-                       "Name": "tag:spb:image-id",
-                       "Values": [self.config.ami],
-                   },
-               ]
-           )
-           log.debug(
-               "Search for launch template response:\n%s", pprint.pformat(response)
-           )
-           for details in response.get("LaunchTemplates"):
-               if launch_template_name is not None:
-                   log.warning(
-                       "Multiple launch templates for the same AMI. This is not "
-                       "supposed to happen. Picked the first one listed: %s",
-                       response,
-                   )
-                   break
-               launch_template_name = details["LaunchTemplateName"]
-           if launch_template_name is None:
+           while True:
+               response = client.describe_launch_templates(
+                   Filters=[
+                       {
+                           "Name": "tag:spb:is-golden-image-template",
+                           "Values": ["true"],
+                       },
+                       {
+                           "Name": "tag:spb:project",
+                           "Values": ["salt-project"],
+                       },
+                       {
+                           "Name": "tag:spb:environment",
+                           "Values": [environment],
+                       },
+                       {
+                           "Name": "tag:spb:image-id",
+                           "Values": [self.config.ami],
+                       },
+                   ],
+                   NextToken=next_token,
+               )
+               log.debug(
+                   "Search for launch template response:\n%s",
+                   pprint.pformat(response),
+               )
+               for details in response.get("LaunchTemplates"):
+                   if launch_template_name is not None:
+                       log.warning(
+                           "Multiple launch templates for the same AMI. This is not "
+                           "supposed to happen. Picked the first one listed: %s",
+                           response,
+                       )
+                       break
+                   launch_template_name = details["LaunchTemplateName"]
+
+               if launch_template_name is not None:
+                   break
+
+               next_token = response.get("NextToken")
+               if next_token:
+                   continue
                self.ctx.error(f"Could not find a launch template for {self.name!r}")
                self.ctx.exit(1)
        except ClientError as exc:
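The rewritten loop is the usual EC2 pagination pattern: keep re-issuing the request with the previous NextToken until the template is found or the response carries no further token. Assuming botocore generates a paginator for this describe call, as it does for most describe_* APIs, the loop could also be hidden behind one; a hedged sketch with the filters trimmed for brevity:

import boto3

client = boto3.client("ec2", region_name="us-west-2")  # region is illustrative
paginator = client.get_paginator("describe_launch_templates")
launch_template_name = None
for page in paginator.paginate(
    Filters=[{"Name": "tag:spb:is-golden-image-template", "Values": ["true"]}]
):
    for details in page["LaunchTemplates"]:
        launch_template_name = details["LaunchTemplateName"]
        break
    if launch_template_name is not None:
        break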